Merged hyracks_asterix_stabilization up to rev 1913
git-svn-id: https://hyracks.googlecode.com/svn/trunk@1924 123451ca-8445-de46-9d55-352943316053
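
A minimal before/after sketch of the BTree test API migration carried by this merge, reconstructed from the hunks below; it is illustrative only, and names such as bufferCache, cmpFactories, lowKey, highKey and searchCmp stand in for whatever each individual test declares.

    // Before the merge: BTree took a prebuilt MultiComparator, and
    // RangePredicate carried an isForward flag.
    MultiComparator cmp = IndexUtils.createMultiComparator(cmpFactories);
    BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager,
            interiorFrameFactory, leafFrameFactory);
    RangePredicate pred = new RangePredicate(true, lowKey, highKey, true, true,
            searchCmp, searchCmp);

    // After the merge: BTree takes an operation callback plus the comparator
    // factories directly, and RangePredicate drops the isForward flag; the
    // backward-search test variants are removed along with it.
    BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount,
            cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
    RangePredicate pred = new RangePredicate(lowKey, highKey, true, true,
            searchCmp, searchCmp);
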
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
index 6655844..607d13a 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
@@ -33,12 +33,6 @@
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-control-nc</artifactId>
- <version>0.2.1-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-btree</artifactId>
<version>0.2.1-SNAPSHOT</version>
<type>jar</type>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeBulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeBulkLoadTest.java
new file mode 100644
index 0000000..11c47c7
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeBulkLoadTest.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexBulkLoadTest;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+
+@SuppressWarnings("rawtypes")
+public class BTreeBulkLoadTest extends OrderedIndexBulkLoadTest {
+
+ public BTreeBulkLoadTest() {
+ super(BTreeTestHarness.LEAF_FRAMES_TO_TEST, 1);
+ }
+
+ private final BTreeTestHarness harness = new BTreeTestHarness();
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @After
+ public void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+ BTreeLeafFrameType leafType) throws Exception {
+ return BTreeTestContext.create(harness.getBufferCache(), harness.getBTreeFileId(), fieldSerdes, numKeys,
+ leafType);
+ }
+
+ @Override
+ protected Random getRandom() {
+ return harness.getRandom();
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeDeleteTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeDeleteTest.java
new file mode 100644
index 0000000..0205540
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeDeleteTest.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexDeleteTest;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+
+@SuppressWarnings("rawtypes")
+public class BTreeDeleteTest extends OrderedIndexDeleteTest {
+
+ public BTreeDeleteTest() {
+ super(BTreeTestHarness.LEAF_FRAMES_TO_TEST);
+ }
+
+ private final BTreeTestHarness harness = new BTreeTestHarness();
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @After
+ public void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+ BTreeLeafFrameType leafType) throws Exception {
+ return BTreeTestContext.create(harness.getBufferCache(), harness.getBTreeFileId(), fieldSerdes, numKeys,
+ leafType);
+ }
+
+ @Override
+ protected Random getRandom() {
+ return harness.getRandom();
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java
index 5bb86b9..f4f8b12 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeExamplesTest.java
@@ -15,610 +15,38 @@
package edu.uci.ics.hyracks.storage.am.btree;
-import java.util.Random;
-import java.util.logging.Level;
+import org.junit.After;
+import org.junit.Before;
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexExamplesTest;
import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-@SuppressWarnings("rawtypes")
-public class BTreeExamplesTest extends AbstractBTreeTest {
+public class BTreeExamplesTest extends OrderedIndexExamplesTest {
+ private final BTreeTestHarness harness = new BTreeTestHarness();
- /**
- * Fixed-Length Key,Value Example.
- *
- * Create a BTree with one fixed-length key field and one fixed-length value
- * field. Fill BTree with random values using insertions (not bulk load).
- * Perform scans and range search.
- */
- @Test
- public void fixedLengthKeyValueExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Fixed-Length Key,Value Example.");
- }
-
- // Declare fields.
- int fieldCount = 2;
- ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
- typeTraits[0] = IntegerPointable.TYPE_TRAITS;
- typeTraits[1] = IntegerPointable.TYPE_TRAITS;
- // Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
-
- // Declare keys.
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
-
- BTree btree = BTreeUtils
- .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
- btree.create(btreeFileId);
- btree.open(btreeFileId);
-
- long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Inserting into tree...");
- }
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- ArrayTupleReference tuple = new ArrayTupleReference();
- ITreeIndexAccessor indexAccessor = btree.createAccessor();
- int numInserts = 10000;
- for (int i = 0; i < numInserts; i++) {
- int f0 = rnd.nextInt() % numInserts;
- int f1 = 5;
- TupleUtils.createIntegerTuple(tb, tuple, f0, f1);
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("Inserting " + i + " : " + f0 + " " + f1);
- }
- }
- try {
- indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
- }
- }
- long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
- }
-
- orderedScan(btree, indexAccessor, fieldSerdes);
- diskOrderScan(btree, indexAccessor, fieldSerdes);
-
- // Build low key.
- ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(keyFieldCount);
- ArrayTupleReference lowKey = new ArrayTupleReference();
- TupleUtils.createIntegerTuple(lowKeyTb, lowKey, -1000);
-
- // Build high key.
- ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(keyFieldCount);
- ArrayTupleReference highKey = new ArrayTupleReference();
- TupleUtils.createIntegerTuple(highKeyTb, highKey, 1000);
-
- rangeSearch(btree, indexAccessor, fieldSerdes, lowKey, highKey);
-
- btree.close();
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
}
- /**
- * Composite Key Example (Non-Unique B-Tree).
- *
- * Create a BTree with two fixed-length key fields and one fixed-length
- * value field. Fill BTree with random values using insertions (not bulk
- * load) Perform scans and range search.
- */
- @Test
- public void twoFixedLengthKeysOneFixedLengthValueExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Composite Key Test");
- }
-
- // Declare fields.
- int fieldCount = 3;
- ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
- typeTraits[0] = IntegerPointable.TYPE_TRAITS;
- typeTraits[1] = IntegerPointable.TYPE_TRAITS;
- typeTraits[2] = IntegerPointable.TYPE_TRAITS;
- // Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
- // declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
- cmps[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
-
- BTree btree = BTreeUtils
- .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
- btree.create(btreeFileId);
- btree.open(btreeFileId);
-
- long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Inserting into tree...");
- }
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- ArrayTupleReference tuple = new ArrayTupleReference();
- ITreeIndexAccessor indexAccessor = btree.createAccessor();
- int numInserts = 10000;
- for (int i = 0; i < 10000; i++) {
- int f0 = rnd.nextInt() % 2000;
- int f1 = rnd.nextInt() % 1000;
- int f2 = 5;
- TupleUtils.createIntegerTuple(tb, tuple, f0, f1, f2);
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("Inserting " + i + " : " + f0 + " " + f1 + " " + f2);
- }
- }
- try {
- indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
- }
- }
- long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
- }
-
- orderedScan(btree, indexAccessor, fieldSerdes);
- diskOrderScan(btree, indexAccessor, fieldSerdes);
-
- // Build low key.
- ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
- ArrayTupleReference lowKey = new ArrayTupleReference();
- TupleUtils.createIntegerTuple(lowKeyTb, lowKey, -3);
-
- // Build high key.
- ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
- ArrayTupleReference highKey = new ArrayTupleReference();
- TupleUtils.createIntegerTuple(highKeyTb, highKey, 3);
-
- // Prefix-Range search in [-3, 3]
- rangeSearch(btree, indexAccessor, fieldSerdes, lowKey, highKey);
-
- btree.close();
+ @After
+ public void tearDown() throws HyracksDataException {
+ harness.tearDown();
}
-
- /**
- * Variable-Length Example. Create a BTree with one variable-length key
- * field and one variable-length value field. Fill BTree with random values
- * using insertions (not bulk load) Perform ordered scans and range search.
- */
- @Test
- public void varLenKeyValueExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Variable-Length Key,Value Example");
- }
-
- // Declare fields.
- int fieldCount = 2;
- ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
- typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
- typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
- // Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE };
-
- // Declare keys.
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
-
- BTree btree = BTreeUtils
- .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
- btree.create(btreeFileId);
- btree.open(btreeFileId);
-
- long start = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Inserting into tree...");
- }
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- ArrayTupleReference tuple = new ArrayTupleReference();
- ITreeIndexAccessor indexAccessor = btree.createAccessor();
- // Max string length to be generated.
- int maxLength = 10;
- int numInserts = 10000;
- for (int i = 0; i < 10000; i++) {
- String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("Inserting " + f0 + " " + f1);
- }
- }
- try {
- indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
- }
- }
- long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(numInserts + " inserts in " + (end - start) + "ms");
- }
-
- orderedScan(btree, indexAccessor, fieldSerdes);
- diskOrderScan(btree, indexAccessor, fieldSerdes);
-
- // Build low key.
- ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
- ArrayTupleReference lowKey = new ArrayTupleReference();
- TupleUtils.createTuple(lowKeyTb, lowKey, fieldSerdes, "cbf");
-
- // Build high key.
- ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
- ArrayTupleReference highKey = new ArrayTupleReference();
- TupleUtils.createTuple(highKeyTb, highKey, fieldSerdes, "cc7");
-
- rangeSearch(btree, indexAccessor, fieldSerdes, lowKey, highKey);
-
- btree.close();
+
+ protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories) throws TreeIndexException {
+ return BTreeUtils.createBTree(harness.getBufferCache(), harness.getOpCallback(), typeTraits, cmpFactories,
+ BTreeLeafFrameType.REGULAR_NSM);
}
-
- /**
- * Deletion Example.
- *
- * Create a BTree with one variable-length key field and one variable-length
- * value field. Fill B-tree with random values using insertions, then delete
- * entries one-by-one. Repeat procedure a few times on same BTree.
- */
- @Test
- public void deleteExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Deletion Example");
- }
-
- // Declare fields.
- int fieldCount = 2;
- ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
- typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
- typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
- // Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE };
-
- // Declare keys.
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
-
- BTree btree = BTreeUtils
- .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
- btree.create(btreeFileId);
- btree.open(btreeFileId);
-
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- ArrayTupleReference tuple = new ArrayTupleReference();
- ITreeIndexAccessor indexAccessor = btree.createAccessor();
- // Max string length to be generated.
- int runs = 3;
- for (int run = 0; run < runs; run++) {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Deletion example run: " + (run + 1) + "/" + runs);
- LOGGER.info("Inserting into tree...");
- }
- int maxLength = 10;
- int ins = 10000;
- String[] f0s = new String[ins];
- String[] f1s = new String[ins];
- int insDone = 0;
- int[] insDoneCmp = new int[ins];
- for (int i = 0; i < ins; i++) {
- String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
- f0s[i] = f0;
- f1s[i] = f1;
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("Inserting " + i);
- }
- }
- try {
- indexAccessor.insert(tuple);
- insDone++;
- } catch (TreeIndexException e) {
- }
- insDoneCmp[i] = insDone;
- }
-
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Deleting from tree...");
- }
- int delDone = 0;
- for (int i = 0; i < ins; i++) {
- TupleUtils.createTuple(tb, tuple, fieldSerdes, f0s[i], f1s[i]);
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("Deleting " + i);
- }
- }
- try {
- indexAccessor.delete(tuple);
- delDone++;
- } catch (TreeIndexException e) {
- }
- if (insDoneCmp[i] != delDone) {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("INCONSISTENT STATE, ERROR IN DELETION EXAMPLE.");
- LOGGER.info("INSDONECMP: " + insDoneCmp[i] + " " + delDone);
- }
- break;
- }
- }
- if (insDone != delDone) {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
- }
- break;
- }
- }
- btree.close();
+
+ protected int getIndexFileId() {
+ return harness.getBTreeFileId();
}
-
- /**
- * Update example.
- *
- * Create a BTree with one variable-length key field and one variable-length
- * value field. Fill B-tree with random values using insertions, then update
- * entries one-by-one. Repeat procedure a few times on same BTree.
- */
- @Test
- public void updateExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Update example");
- }
-
- // Declare fields.
- int fieldCount = 2;
- ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
- typeTraits[0] = UTF8StringPointable.TYPE_TRAITS;
- typeTraits[1] = UTF8StringPointable.TYPE_TRAITS;
- // Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE };
-
- // Declare keys.
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
-
- BTree btree = BTreeUtils
- .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
- btree.create(btreeFileId);
- btree.open(btreeFileId);
-
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Inserting into tree...");
- }
- ITreeIndexAccessor indexAccessor = btree.createAccessor();
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- ArrayTupleReference tuple = new ArrayTupleReference();
- int maxLength = 10;
- int ins = 10000;
- String[] keys = new String[10000];
- for (int i = 0; i < ins; i++) {
- String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- TupleUtils.createTuple(tb, tuple, fieldSerdes, f0, f1);
- keys[i] = f0;
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("Inserting " + i);
- }
- }
- try {
- indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
- }
- }
- // Print before doing any updates.
- orderedScan(btree, indexAccessor, fieldSerdes);
-
- int runs = 3;
- for (int run = 0; run < runs; run++) {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Update test run: " + (run + 1) + "/" + runs);
- LOGGER.info("Updating BTree");
- }
- for (int i = 0; i < ins; i++) {
- // Generate a new random value for f1.
- String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- TupleUtils.createTuple(tb, tuple, fieldSerdes, keys[i], f1);
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("UPDATING " + i);
- }
- }
- try {
- indexAccessor.update(tuple);
- } catch (TreeIndexException e) {
- }
- }
- // Do another scan after a round of updates.
- orderedScan(btree, indexAccessor, fieldSerdes);
- }
- btree.close();
- }
-
- /**
- * Bulk load example.
- *
- * Load a tree with 100,000 tuples. BTree has a composite key to "simulate"
- * non-unique index creation.
- *
- */
- @Test
- public void bulkLoadExample() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Bulk load example");
- }
- // Declare fields.
- int fieldCount = 3;
- ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
- typeTraits[0] = IntegerPointable.TYPE_TRAITS;
- typeTraits[1] = IntegerPointable.TYPE_TRAITS;
- typeTraits[2] = IntegerPointable.TYPE_TRAITS;
- // Declare field serdes.
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
- // declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
- cmps[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
-
- BTree btree = BTreeUtils
- .createBTree(bufferCache, btreeFileId, typeTraits, cmps, BTreeLeafFrameType.REGULAR_NSM);
- btree.create(btreeFileId);
- btree.open(btreeFileId);
-
- // Load sorted records.
- int ins = 100000;
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Bulk loading " + ins + " tuples");
- }
- long start = System.currentTimeMillis();
- IIndexBulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- ArrayTupleReference tuple = new ArrayTupleReference();
- for (int i = 0; i < ins; i++) {
- TupleUtils.createIntegerTuple(tb, tuple, i, i, 5);
- btree.bulkLoadAddTuple(tuple, bulkLoadCtx);
- }
- btree.endBulkLoad(bulkLoadCtx);
- long end = System.currentTimeMillis();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(ins + " tuples loaded in " + (end - start) + "ms");
- }
-
- ITreeIndexAccessor indexAccessor = btree.createAccessor();
-
- // Build low key.
- ArrayTupleBuilder lowKeyTb = new ArrayTupleBuilder(1);
- ArrayTupleReference lowKey = new ArrayTupleReference();
- TupleUtils.createIntegerTuple(lowKeyTb, lowKey, 44444);
-
- // Build high key.
- ArrayTupleBuilder highKeyTb = new ArrayTupleBuilder(1);
- ArrayTupleReference highKey = new ArrayTupleReference();
- TupleUtils.createIntegerTuple(highKeyTb, highKey, 44500);
-
- // Prefix-Range search in [44444, 44500]
- rangeSearch(btree, indexAccessor, fieldSerdes, lowKey, highKey);
-
- btree.close();
- }
-
- private void orderedScan(BTree btree, ITreeIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes)
- throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Ordered Scan:");
- }
- IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
- ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame, false);
- RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
- indexAccessor.search(scanCursor, nullPred);
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(rec);
- }
- }
- } finally {
- scanCursor.close();
- }
- }
-
- private void diskOrderScan(BTree btree, ITreeIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes)
- throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Disk-Order Scan:");
- }
- IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
- TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
- indexAccessor.diskOrderScan(diskOrderCursor);
- try {
- while (diskOrderCursor.hasNext()) {
- diskOrderCursor.next();
- ITupleReference frameTuple = diskOrderCursor.getTuple();
- String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(rec);
- }
- }
- } finally {
- diskOrderCursor.close();
- }
- }
-
- private void rangeSearch(BTree btree, ITreeIndexAccessor indexAccessor, ISerializerDeserializer[] fieldSerdes,
- ITupleReference lowKey, ITupleReference highKey) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- String lowKeyString = TupleUtils.printTuple(lowKey, fieldSerdes);
- String highKeyString = TupleUtils.printTuple(highKey, fieldSerdes);
- LOGGER.info("Range-Search in: [" + lowKeyString + ", " + highKeyString + "]");
- }
- IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
- MultiComparator lowKeySearchCmp = BTreeUtils.getSearchMultiComparator(btree.getMultiComparator(), lowKey);
- MultiComparator highKeySearchCmp = BTreeUtils.getSearchMultiComparator(btree.getMultiComparator(), highKey);
- ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame, false);
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, lowKeySearchCmp,
- highKeySearchCmp);
- indexAccessor.search(rangeCursor, rangePred);
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = TupleUtils.printTuple(frameTuple, fieldSerdes);
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(rec);
- }
- }
- } finally {
- rangeCursor.close();
- }
- }
-
- public static String randomString(int length, Random random) {
- String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
- StringBuilder strBuilder = new StringBuilder();
- for (int i = 0; i < s.length() && i < length; i++) {
- strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
- }
- return strBuilder.toString();
- }
-}
\ No newline at end of file
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeInsertTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeInsertTest.java
new file mode 100644
index 0000000..0b6cf4d
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeInsertTest.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexInsertTest;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+
+/**
+ * Tests the BTree insert operation with strings and integer fields using
+ * various numbers of key and payload fields.
+ *
+ * Each test first fills a BTree with randomly generated tuples. We compare the
+ * following operations against expected results: 1. Point searches for all
+ * tuples. 2. Ordered scan. 3. Disk-order scan. 4. Range search (and prefix
+ * search for composite keys).
+ *
+ */
+@SuppressWarnings("rawtypes")
+public class BTreeInsertTest extends OrderedIndexInsertTest {
+
+ public BTreeInsertTest() {
+ super(BTreeTestHarness.LEAF_FRAMES_TO_TEST);
+ }
+
+ private final BTreeTestHarness harness = new BTreeTestHarness();
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @After
+ public void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+ BTreeLeafFrameType leafType) throws Exception {
+ return BTreeTestContext.create(harness.getBufferCache(), harness.getBTreeFileId(), fieldSerdes, numKeys,
+ leafType);
+ }
+
+ @Override
+ protected Random getRandom() {
+ return harness.getRandom();
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchCursorTest.java
similarity index 78%
rename from hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
rename to hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchCursorTest.java
index 4031e7f..4003cf1 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeSearchCursorTest.java
@@ -55,11 +55,12 @@
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.common.util.IndexUtils;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-public class RangeSearchCursorTest extends AbstractBTreeTest {
+public class BTreeSearchCursorTest extends AbstractBTreeTest {
// Declare fields
int fieldCount = 2;
ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
@@ -83,22 +84,23 @@
LOGGER.info("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
}
+ IBufferCache bufferCache = harness.getBufferCache();
+ int btreeFileId = harness.getBTreeFileId();
+
// declare keys
int keyFieldCount = 1;
IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
- MultiComparator cmp = IndexUtils.createMultiComparator(cmpFactories);
-
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
- BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+ BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount, cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
btree.create(btreeFileId);
btree.open(btreeFileId);
@@ -140,16 +142,10 @@
int maxSearchKey = 100;
// forward searches
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
-
- // backward searches
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, false, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false);
btree.close();
}
@@ -160,23 +156,24 @@
LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
}
+ IBufferCache bufferCache = harness.getBufferCache();
+ int btreeFileId = harness.getBTreeFileId();
+
// declare keys
int keyFieldCount = 2;
IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
- MultiComparator cmp = IndexUtils.createMultiComparator(cmpFactories);
-
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
- BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+ BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount, cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
btree.create(btreeFileId);
btree.open(btreeFileId);
@@ -215,16 +212,10 @@
int maxSearchKey = 100;
// forward searches
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
-
- // backward searches
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, false, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false);
btree.close();
}
@@ -235,23 +226,24 @@
LOGGER.info("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
}
+ IBufferCache bufferCache = harness.getBufferCache();
+ int btreeFileId = harness.getBTreeFileId();
+
// declare keys
int keyFieldCount = 2;
IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
- MultiComparator cmp = IndexUtils.createMultiComparator(cmpFactories);
-
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
- BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+ BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount, cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
btree.create(btreeFileId);
btree.open(btreeFileId);
@@ -290,22 +282,16 @@
int maxSearchKey = 100;
// forward searches
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
-
- // backward searches
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, false, true, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false, false);
- performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, false);
+ performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false);
btree.close();
}
- public RangePredicate createRangePredicate(int lk, int hk, boolean isForward, boolean lowKeyInclusive,
- boolean highKeyInclusive, MultiComparator cmp) throws HyracksDataException {
+ public RangePredicate createRangePredicate(int lk, int hk, boolean lowKeyInclusive,
+ boolean highKeyInclusive) throws HyracksDataException {
// create tuplereferences for search keys
ITupleReference lowKey = TupleUtils.createIntegerTuple(lk);
@@ -315,13 +301,13 @@
searchCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
MultiComparator searchCmp = new MultiComparator(searchCmps);
- RangePredicate rangePred = new RangePredicate(isForward, lowKey, highKey, lowKeyInclusive, highKeyInclusive,
+ RangePredicate rangePred = new RangePredicate(lowKey, highKey, lowKeyInclusive, highKeyInclusive,
searchCmp, searchCmp);
return rangePred;
}
public void getExpectedResults(ArrayList<Integer> expectedResults, ArrayList<Integer> keys, int lk, int hk,
- boolean isForward, boolean lowKeyInclusive, boolean highKeyInclusive) {
+ boolean lowKeyInclusive, boolean highKeyInclusive) {
// special cases
if (lk == hk && (!lowKeyInclusive || !highKeyInclusive))
@@ -329,35 +315,21 @@
if (lk > hk)
return;
- if (isForward) {
- for (int i = 0; i < keys.size(); i++) {
- if ((lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive)) {
- expectedResults.add(keys.get(i));
- continue;
- }
-
- if (lk < keys.get(i) && hk > keys.get(i)) {
- expectedResults.add(keys.get(i));
- continue;
- }
+ for (int i = 0; i < keys.size(); i++) {
+ if ((lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive)) {
+ expectedResults.add(keys.get(i));
+ continue;
}
- } else {
- for (int i = keys.size() - 1; i >= 0; i--) {
- if ((lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive)) {
- expectedResults.add(keys.get(i));
- continue;
- }
- if (lk < keys.get(i) && hk > keys.get(i)) {
- expectedResults.add(keys.get(i));
- continue;
- }
+ if (lk < keys.get(i) && hk > keys.get(i)) {
+ expectedResults.add(keys.get(i));
+ continue;
}
}
}
public boolean performSearches(ArrayList<Integer> keys, BTree btree, IBTreeLeafFrame leafFrame,
- IBTreeInteriorFrame interiorFrame, int minKey, int maxKey, boolean isForward, boolean lowKeyInclusive,
+ IBTreeInteriorFrame interiorFrame, int minKey, int maxKey, boolean lowKeyInclusive,
boolean highKeyInclusive, boolean printExpectedResults) throws Exception {
ArrayList<Integer> results = new ArrayList<Integer>();
@@ -373,8 +345,8 @@
int highKey = j;
ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame, false);
- RangePredicate rangePred = createRangePredicate(lowKey, highKey, isForward, lowKeyInclusive,
- highKeyInclusive, btree.getMultiComparator());
+ RangePredicate rangePred = createRangePredicate(lowKey, highKey, lowKeyInclusive,
+ highKeyInclusive);
ITreeIndexAccessor indexAccessor = btree.createAccessor();
indexAccessor.search(rangeCursor, rangePred);
@@ -394,7 +366,7 @@
rangeCursor.close();
}
- getExpectedResults(expectedResults, keys, lowKey, highKey, isForward, lowKeyInclusive, highKeyInclusive);
+ getExpectedResults(expectedResults, keys, lowKey, highKey, lowKeyInclusive, highKeyInclusive);
if (printExpectedResults) {
if (expectedResults.size() > 0) {
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StatsTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
similarity index 91%
rename from hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StatsTest.java
rename to hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
index 1ae80d6..c33b4e9 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StatsTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeStatsTest.java
@@ -36,9 +36,8 @@
import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.common.util.IndexUtils;
import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexBufferCacheWarmup;
import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStats;
import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStatsGatherer;
@@ -47,11 +46,8 @@
import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
import edu.uci.ics.hyracks.test.support.TestUtils;
-public class StatsTest extends AbstractBTreeTest {
-
- // private static final int PAGE_SIZE = 256;
- // private static final int NUM_PAGES = 10;
- // private static final int PAGE_SIZE = 32768;
+@SuppressWarnings("rawtypes")
+public class BTreeStatsTest extends AbstractBTreeTest {
private static final int PAGE_SIZE = 4096;
private static final int NUM_PAGES = 1000;
private static final int MAX_OPEN_FILES = 10;
@@ -64,7 +60,7 @@
TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
+ FileReference file = new FileReference(new File(harness.getFileName()));
bufferCache.createFile(file);
int fileId = fmp.lookupFileId(file);
bufferCache.openFile(fileId);
@@ -80,8 +76,6 @@
IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
- MultiComparator cmp = IndexUtils.createMultiComparator(cmpFactories);
-
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
@@ -91,9 +85,9 @@
IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
- BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+ BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount, cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
btree.create(fileId);
btree.open(fileId);
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTestDriver.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTestDriver.java
deleted file mode 100644
index 1daa273..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTestDriver.java
+++ /dev/null
@@ -1,133 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import java.util.logging.Level;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
-
-@SuppressWarnings("rawtypes")
-public abstract class BTreeTestDriver extends AbstractBTreeTest {
-
- protected static final int numTuplesToInsert = 10000;
-
- protected abstract void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception;
- protected abstract String getTestOpName();
-
- @Test
- public void oneIntKeyAndValue() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("BTree " + getTestOpName() + " Test With One Int Key And Value.");
- }
-
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
- // Range search in [-1000, 1000]
- ITupleReference lowKey = TupleUtils.createIntegerTuple(-1000);
- ITupleReference highKey = TupleUtils.createIntegerTuple(1000);
-
- runTest(fieldSerdes, 1, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, null, null);
- runTest(fieldSerdes, 1, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, null, null);
- }
-
- @Test
- public void twoIntKeys() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys.");
- }
-
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
- // Range search in [50 0, 50 500]
- ITupleReference lowKey = TupleUtils.createIntegerTuple(50, 0);
- ITupleReference highKey = TupleUtils.createIntegerTuple(50, 500);
-
- // Prefix range search in [50, 50]
- ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
- ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
-
- runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
- runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
- }
-
- @Test
- public void twoIntKeysAndValues() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("BTree " + getTestOpName() + " Test With Two Int Keys And Values.");
- }
-
- ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
-
- // Range search in [50 100, 100 100]
- ITupleReference lowKey = TupleUtils.createIntegerTuple(-100, -100);
- ITupleReference highKey = TupleUtils.createIntegerTuple(100, 100);
-
- // Prefix range search in [50, 50]
- ITupleReference prefixLowKey = TupleUtils.createIntegerTuple(50);
- ITupleReference prefixHighKey = TupleUtils.createIntegerTuple(50);
-
- runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
- runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
- }
-
- @Test
- public void oneStringKeyAndValue() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("BTree " + getTestOpName() + " Test With One String Key And Value.");
- }
-
- ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
-
- // Range search in ["cbf", cc7"]
- ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
- ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7");
-
- runTest(fieldSerdes, 1, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, null, null);
- runTest(fieldSerdes, 1, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, null, null);
- }
-
- @Test
- public void twoStringKeys() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys.");
- }
-
- ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
-
- // Range search in ["cbf", "ddd", cc7", "eee"]
- ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
- ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
-
- // Prefix range search in ["cbf", cc7"]
- ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
- ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
-
- runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
- runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
- }
-
- @Test
- public void twoStringKeysAndValues() throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("BTree " + getTestOpName() + " Test With Two String Keys And Values.");
- }
-
- ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE, UTF8StringSerializerDeserializer.INSTANCE };
-
- // Range search in ["cbf", "ddd", cc7", "eee"]
- ITupleReference lowKey = TupleUtils.createTuple(fieldSerdes, "cbf", "ddd");
- ITupleReference highKey = TupleUtils.createTuple(fieldSerdes, "cc7", "eee");
-
- // Prefix range search in ["cbf", cc7"]
- ITupleReference prefixLowKey = TupleUtils.createTuple(fieldSerdes, "cbf");
- ITupleReference prefixHighKey = TupleUtils.createTuple(fieldSerdes, "cc7");
-
- runTest(fieldSerdes, 2, BTreeLeafFrameType.REGULAR_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
- runTest(fieldSerdes, 2, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM, lowKey, highKey, prefixLowKey, prefixHighKey);
- }
-}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateSearchTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
similarity index 89%
rename from hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateSearchTest.java
rename to hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
index 5d43ae1..2b03a6a 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateSearchTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateSearchTest.java
@@ -30,15 +30,19 @@
import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.common.util.IndexUtils;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-public class UpdateSearchTest extends AbstractBTreeTest {
+@SuppressWarnings("rawtypes")
+public class BTreeUpdateSearchTest extends AbstractBTreeTest {
// Update scan test on fixed-length tuples.
@Test
public void test01() throws Exception {
+ IBufferCache bufferCache = harness.getBufferCache();
+ int btreeFileId = harness.getBTreeFileId();
+
// declare fields
int fieldCount = 2;
ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
@@ -50,8 +54,6 @@
IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
- MultiComparator cmp = IndexUtils.createMultiComparator(cmpFactories);
-
ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE };
@@ -62,8 +64,8 @@
IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
- BTree btree = new BTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
+ BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount, cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
btree.create(btreeFileId);
btree.open(btreeFileId);
@@ -81,7 +83,7 @@
ITreeIndexAccessor indexAccessor = btree.createAccessor();
int numInserts = 10000;
- for (int i = 0; i < 10000; i++) {
+ for (int i = 0; i < numInserts; i++) {
int f0 = rnd.nextInt() % 10000;
int f1 = 5;
TupleUtils.createIntegerTuple(tb, insertTuple, f0, f1);
@@ -111,7 +113,7 @@
}
// Set the cursor to X latch nodes.
ITreeIndexCursor updateScanCursor = new BTreeRangeSearchCursor(leafFrame, true);
- RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);
indexAccessor.search(updateScanCursor, nullPred);
try {
while (updateScanCursor.hasNext()) {
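Taken together, the hunks above change three call sites: the free page manager no longer takes a file id, the BTree constructor now receives an operation callback plus the raw comparator factories instead of a prebuilt MultiComparator, and RangePredicate loses its leading search-key flag. A minimal sketch of the resulting wiring, assuming the frame factories, type traits, and comparator factories declared earlier in this test are in scope:

    IFreePageManager freePageManager =
            new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
    BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, fieldCount,
            cmpFactories, freePageManager, interiorFrameFactory, leafFrameFactory);
    btree.create(btreeFileId);
    btree.open(btreeFileId);
    // A predicate with null low and high keys still denotes a full scan.
    RangePredicate nullPred = new RangePredicate(null, null, true, true, null, null);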
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateTest.java
new file mode 100644
index 0000000..c3b56d5
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpdateTest.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexUpdateTest;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+
+@SuppressWarnings("rawtypes")
+public class BTreeUpdateTest extends OrderedIndexUpdateTest {
+
+ public BTreeUpdateTest() {
+ super(BTreeTestHarness.LEAF_FRAMES_TO_TEST);
+ }
+
+ private final BTreeTestHarness harness = new BTreeTestHarness();
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @After
+ public void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+ BTreeLeafFrameType leafType) throws Exception {
+ return BTreeTestContext.create(harness.getBufferCache(), harness.getBTreeFileId(), fieldSerdes, numKeys,
+ leafType);
+ }
+
+ @Override
+ protected Random getRandom() {
+ return harness.getRandom();
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpsertTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpsertTest.java
new file mode 100644
index 0000000..6e14607
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeUpsertTest.java
@@ -0,0 +1,69 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+
+/**
+ * Tests the BTree upsert operation with string and integer fields using
+ * various numbers of key and payload fields.
+ *
+ * Each test first fills a BTree with randomly generated tuples. We compare the
+ * following operations against expected results: 1. Point searches for all
+ * tuples. 2. Ordered scan. 3. Disk-order scan. 4. Range search (and prefix
+ * search for composite keys).
+ *
+ */
+@SuppressWarnings("rawtypes")
+public class BTreeUpsertTest extends OrderedIndexUpsertTest {
+
+ public BTreeUpsertTest() {
+ super(BTreeTestHarness.LEAF_FRAMES_TO_TEST);
+ }
+
+ private final BTreeTestHarness harness = new BTreeTestHarness();
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @After
+ public void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected OrderedIndexTestContext createTestContext(ISerializerDeserializer[] fieldSerdes, int numKeys,
+ BTreeLeafFrameType leafType) throws Exception {
+ return BTreeTestContext.create(harness.getBufferCache(), harness.getBTreeFileId(), fieldSerdes, numKeys,
+ leafType);
+ }
+
+ @Override
+ protected Random getRandom() {
+ return harness.getRandom();
+ }
+}
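The comparison steps listed in the Javadoc are carried out by the shared OrderedIndexUpsertTest driver rather than in this class, so they do not appear in this diff. Conceptually they reduce to the hedged sketch below, which uses only accessor calls that occur elsewhere in this change set; generatedTuples and the expected-result bookkeeping are illustrative placeholders:

    BTree.BTreeAccessor accessor = (BTree.BTreeAccessor) btree.createAccessor();
    // Load phase: upsert randomly generated tuples.
    for (ITupleReference t : generatedTuples) {
        accessor.upsert(t);
    }
    // Verify phase: scan everything back in key order and compare.
    ITreeIndexCursor cursor = accessor.createSearchCursor();
    RangePredicate everything = new RangePredicate(null, null, true, true, null, null);
    accessor.search(cursor, everything);
    try {
        while (cursor.hasNext()) {
            cursor.next();
            // Compare cursor.getTuple() against the ordered set of expected tuples.
        }
    } finally {
        cursor.close();
    }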
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BulkLoadTest.java
deleted file mode 100644
index 49d08a3..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BulkLoadTest.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
-
-@SuppressWarnings("rawtypes")
-public class BulkLoadTest extends BTreeTestDriver {
-
- @Override
- protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
- BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
-
- // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
- if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
- BTreeTestUtils.bulkLoadIntTuples(testCtx, numTuplesToInsert, rnd);
- } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
- BTreeTestUtils.bulkLoadStringTuples(testCtx, numTuplesToInsert, rnd);
- }
-
- BTreeTestUtils.checkPointSearches(testCtx);
- BTreeTestUtils.checkOrderedScan(testCtx);
- BTreeTestUtils.checkDiskOrderScan(testCtx);
- BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
- if (prefixLowKey != null && prefixHighKey != null) {
- BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
- }
- }
-
- @Override
- protected String getTestOpName() {
- return "BulkLoad";
- }
-}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/DeleteTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/DeleteTest.java
deleted file mode 100644
index 2157adb..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/DeleteTest.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
-
-@SuppressWarnings("rawtypes")
-public class DeleteTest extends BTreeTestDriver {
-
- private static final int numInsertRounds = 3;
- private static final int numDeleteRounds = 3;
-
- @Override
- protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
- BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
- for (int i = 0; i < numInsertRounds; i++) {
-
- // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
- if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
- BTreeTestUtils.insertIntTuples(testCtx, numTuplesToInsert, rnd);
- } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
- BTreeTestUtils.insertStringTuples(testCtx, numTuplesToInsert, rnd);
- }
-
- int numTuplesPerDeleteRound = (int)Math.ceil((float)testCtx.checkTuples.size() / (float)numDeleteRounds);
- for(int j = 0; j < numDeleteRounds; j++) {
- BTreeTestUtils.deleteTuples(testCtx, numTuplesPerDeleteRound, rnd);
-
- BTreeTestUtils.checkPointSearches(testCtx);
- BTreeTestUtils.checkOrderedScan(testCtx);
- BTreeTestUtils.checkDiskOrderScan(testCtx);
- BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
- if (prefixLowKey != null && prefixHighKey != null) {
- BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
- }
- }
- }
- }
-
- @Override
- protected String getTestOpName() {
- return "Delete";
- }
-}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java
index e8aa00a..d61d16a 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/FieldPrefixNSMTest.java
@@ -45,9 +45,11 @@
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexUtils;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+@SuppressWarnings("rawtypes")
public class FieldPrefixNSMTest extends AbstractBTreeTest {
private static final int PAGE_SIZE = 32768; // 32K
@@ -55,6 +57,10 @@
private static final int MAX_OPEN_FILES = 10;
private static final int HYRACKS_FRAME_SIZE = 128;
+ public FieldPrefixNSMTest() {
+ super(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES, HYRACKS_FRAME_SIZE);
+ }
+
private ITupleReference createTuple(IHyracksTaskContext ctx, int f0, int f1, int f2, boolean print)
throws HyracksDataException {
if (print) {
@@ -67,7 +73,7 @@
FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
DataOutput dos = tb.getDataOutput();
-
+
ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
@@ -92,8 +98,8 @@
}
@Test
- public void test01() throws Exception {
-
+ public void test01() throws Exception {
+
// declare fields
int fieldCount = 3;
ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
@@ -116,6 +122,9 @@
Random rnd = new Random();
rnd.setSeed(50);
+ IBufferCache bufferCache = harness.getBufferCache();
+ int btreeFileId = harness.getBTreeFileId();
+ IHyracksTaskContext ctx = harness.getHyracksTaskContext();
ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(btreeFileId, 0), false);
try {
@@ -213,20 +222,4 @@
bufferCache.unpin(page);
}
}
-
- public int getPageSize() {
- return PAGE_SIZE;
- }
-
- public int getNumPages() {
- return NUM_PAGES;
- }
-
- public int getHyracksFrameSize() {
- return HYRACKS_FRAME_SIZE;
- }
-
- public int getMaxOpenFiles() {
- return MAX_OPEN_FILES;
- }
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/InsertTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/InsertTest.java
deleted file mode 100644
index fa3f895..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/InsertTest.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
-
-/**
- * Tests the BTree insert operation with strings and integer fields using
- * various numbers of key and payload fields.
- *
- * Each test first fills a BTree with randomly generated tuples.
- * We compare the following operations against expected results:
- * 1. Point searches for all tuples.
- * 2. Ordered scan.
- * 3. Disk-order scan.
- * 4. Range search (and prefix search for composite keys).
- *
- */
-@SuppressWarnings("rawtypes")
-public class InsertTest extends BTreeTestDriver {
- @Override
- protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
- BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
- // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
- if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
- BTreeTestUtils.insertIntTuples(testCtx, numTuplesToInsert, rnd);
- } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
- BTreeTestUtils.insertStringTuples(testCtx, numTuplesToInsert, rnd);
- }
-
- BTreeTestUtils.checkPointSearches(testCtx);
- BTreeTestUtils.checkOrderedScan(testCtx);
- BTreeTestUtils.checkDiskOrderScan(testCtx);
-
- BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
- if (prefixLowKey != null && prefixHighKey != null) {
- BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
- }
- testCtx.btree.close();
- }
-
- @Override
- protected String getTestOpName() {
- return "Insert";
- }
-}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
index ac4133d..9ec64b5 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
@@ -15,7 +15,6 @@
package edu.uci.ics.hyracks.storage.am.btree;
-import java.io.File;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
@@ -24,21 +23,13 @@
import org.junit.Test;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.storage.am.btree.util.AbstractBTreeTest;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
import edu.uci.ics.hyracks.storage.common.sync.LatchType;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-public class StorageManagerTest extends AbstractBTreeTest {
- private static final int PAGE_SIZE = 256;
- private static final int NUM_PAGES = 10;
- private static final int MAX_OPEN_FILES = 10;
- private static final int HYRACKS_FRAME_SIZE = 32768;
-
+public class StorageManagerTest extends AbstractBTreeTest {
public class PinnedLatchedPage {
public final ICachedPage page;
public final LatchType latch;
@@ -258,38 +249,11 @@
}
@Test
- public void oneThreadOneFileTest() throws Exception {
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
-
- Thread worker = new Thread(new FileAccessWorker(0, bufferCache, FileAccessType.FTA_UNLATCHED, fileId, 10, 10,
- 100, 10, 0));
-
+ public void oneThreadOneFileTest() throws Exception {
+ Thread worker = new Thread(new FileAccessWorker(0,
+ harness.getBufferCache(), FileAccessType.FTA_UNLATCHED,
+ harness.getBTreeFileId(), 10, 10, 100, 10, 0));
worker.start();
-
worker.join();
-
- bufferCache.close();
- }
-
- public int getPageSize() {
- return PAGE_SIZE;
- }
-
- public int getNumPages() {
- return NUM_PAGES;
- }
-
- public int getHyracksFrameSize() {
- return HYRACKS_FRAME_SIZE;
- }
-
- public int getMaxOpenFiles() {
- return MAX_OPEN_FILES;
}
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateTest.java
deleted file mode 100644
index b40539f..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/UpdateTest.java
+++ /dev/null
@@ -1,64 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree;
-
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestContext;
-import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestUtils;
-
-@SuppressWarnings("rawtypes")
-public class UpdateTest extends BTreeTestDriver {
- private static final int numUpdateRounds = 3;
-
- @Override
- protected void runTest(ISerializerDeserializer[] fieldSerdes, int numKeys, BTreeLeafFrameType leafType, ITupleReference lowKey, ITupleReference highKey, ITupleReference prefixLowKey, ITupleReference prefixHighKey) throws Exception {
- // This is a noop because we can only update non-key fields.
- if (fieldSerdes.length == numKeys) {
- return;
- }
-
- BTreeTestContext testCtx = BTreeTestUtils.createBTreeTestContext(bufferCache, btreeFileId, fieldSerdes, numKeys, leafType);
-
- // We assume all fieldSerdes are of the same type. Check the first one to determine which field types to generate.
- if (fieldSerdes[0] instanceof IntegerSerializerDeserializer) {
- BTreeTestUtils.insertIntTuples(testCtx, numTuplesToInsert, rnd);
- } else if (fieldSerdes[0] instanceof UTF8StringSerializerDeserializer) {
- BTreeTestUtils.insertStringTuples(testCtx, numTuplesToInsert, rnd);
- }
-
- int numTuplesPerDeleteRound = (int)Math.ceil((float)testCtx.checkTuples.size() / (float)numUpdateRounds);
- for(int j = 0; j < numUpdateRounds; j++) {
- BTreeTestUtils.updateTuples(testCtx, numTuplesPerDeleteRound, rnd);
-
- BTreeTestUtils.checkPointSearches(testCtx);
- BTreeTestUtils.checkOrderedScan(testCtx);
- BTreeTestUtils.checkDiskOrderScan(testCtx);
- BTreeTestUtils.checkRangeSearch(testCtx, lowKey, highKey, true, true);
- if (prefixLowKey != null && prefixHighKey != null) {
- BTreeTestUtils.checkRangeSearch(testCtx, prefixLowKey, prefixHighKey, true, true);
- }
- }
- }
-
- @Override
- protected String getTestOpName() {
- return "Update";
- }
-}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java
new file mode 100644
index 0000000..596fa31
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.multithread;
+
+import java.util.ArrayList;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexMultiThreadTest;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeTestHarness;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+
+public class BTreeMultiThreadTest extends OrderedIndexMultiThreadTest {
+
+ private BTreeTestHarness harness = new BTreeTestHarness();
+
+ private BTreeTestWorkerFactory workerFactory = new BTreeTestWorkerFactory();
+
+ @Override
+ protected void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @Override
+ protected void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] cmpFactories) throws TreeIndexException {
+ return BTreeUtils.createBTree(harness.getBufferCache(), harness.getOpCallback(), typeTraits, cmpFactories, BTreeLeafFrameType.REGULAR_NSM);
+ }
+
+ @Override
+ protected ITreeIndexTestWorkerFactory getWorkerFactory() {
+ return workerFactory;
+ }
+
+ @Override
+ protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
+ ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
+
+ // Insert only workload.
+ TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
+ workloadConfs.add(new TestWorkloadConf(insertOnlyOps, getUniformOpProbs(insertOnlyOps)));
+
+ // Inserts mixed with point searches and scans.
+ TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.DISKORDER_SCAN };
+ workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, getUniformOpProbs(insertSearchOnlyOps)));
+
+ // Inserts, updates, deletes, and upserts.
+ TestOperation[] insertDeleteUpdateUpsertOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE, TestOperation.UPSERT };
+ workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateUpsertOps, getUniformOpProbs(insertDeleteUpdateUpsertOps)));
+
+ // All operations mixed.
+ TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE, TestOperation.UPSERT, TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.DISKORDER_SCAN };
+ workloadConfs.add(new TestWorkloadConf(allOps, getUniformOpProbs(allOps)));
+
+ return workloadConfs;
+ }
+
+ @Override
+ protected int getFileId() {
+ return harness.getBTreeFileId();
+ }
+
+ @Override
+ protected String getIndexTypeName() {
+ return "BTree";
+ }
+}
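Each TestWorkloadConf pairs an operation mix with a probability vector; getUniformOpProbs (inherited from the shared multi-thread test driver) presumably weights every listed operation equally. Adding another mix, say deletes stressed against concurrent scans, would follow the same pattern; the mix below is purely illustrative:

    // Hypothetical extra workload: deletes interleaved with full scans.
    TestOperation[] deleteScanOps = new TestOperation[] { TestOperation.DELETE, TestOperation.SCAN };
    workloadConfs.add(new TestWorkloadConf(deleteScanOps, getUniformOpProbs(deleteScanOps)));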
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
new file mode 100644
index 0000000..7d8de7d
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
@@ -0,0 +1,133 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.multithread;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNonExistentKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeNotUpdateableException;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+public class BTreeTestWorker extends AbstractTreeIndexTestWorker {
+
+ private final BTree btree;
+ private final int numKeyFields;
+ private final ArrayTupleBuilder deleteTb;
+ private final ArrayTupleReference deleteTuple = new ArrayTupleReference();
+
+ public BTreeTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, ITreeIndex index, int numBatches) {
+ super(dataGen, opSelector, index, numBatches);
+ btree = (BTree) index;
+ numKeyFields = btree.getComparatorFactories().length;
+ deleteTb = new ArrayTupleBuilder(numKeyFields);
+ }
+
+ @Override
+ public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, TreeIndexException {
+ BTree.BTreeAccessor accessor = (BTree.BTreeAccessor) indexAccessor;
+ ITreeIndexCursor searchCursor = accessor.createSearchCursor();
+ ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
+ MultiComparator cmp = accessor.getOpContext().cmp;
+ RangePredicate rangePred = new RangePredicate(tuple, tuple, true, true, cmp, cmp);
+
+ switch (op) {
+ case INSERT:
+ try {
+ accessor.insert(tuple);
+ } catch (BTreeDuplicateKeyException e) {
+ // Ignore duplicate keys, since we get random tuples.
+ }
+ break;
+
+ case DELETE:
+ // Create a tuple reference with only key fields.
+ deleteTb.reset();
+ for (int i = 0; i < numKeyFields; i++) {
+ deleteTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+ }
+ deleteTuple.reset(deleteTb.getFieldEndOffsets(), deleteTb.getByteArray());
+ try {
+ accessor.delete(deleteTuple);
+ } catch (BTreeNonExistentKeyException e) {
+                    // Ignore non-existent keys, since we get random tuples.
+ }
+ break;
+
+ case UPDATE:
+ try {
+ accessor.update(tuple);
+ } catch (BTreeNonExistentKeyException e) {
+                    // Ignore non-existent keys, since we get random tuples.
+ } catch (BTreeNotUpdateableException e) {
+ // Ignore not updateable exception due to numKeys == numFields.
+ }
+ break;
+
+ case UPSERT:
+ accessor.upsert(tuple);
+ // Upsert should not throw. If it does, there's
+ // a bigger problem and the test should fail.
+ break;
+
+ case POINT_SEARCH:
+ searchCursor.reset();
+ rangePred.setLowKey(tuple, true);
+ rangePred.setHighKey(tuple, true);
+ accessor.search(searchCursor, rangePred);
+ consumeCursorTuples(searchCursor);
+ break;
+
+ case SCAN:
+ searchCursor.reset();
+ rangePred.setLowKey(null, true);
+ rangePred.setHighKey(null, true);
+ accessor.search(searchCursor, rangePred);
+ consumeCursorTuples(searchCursor);
+ break;
+
+ case DISKORDER_SCAN:
+ diskOrderScanCursor.reset();
+ accessor.diskOrderScan(diskOrderScanCursor);
+ consumeCursorTuples(diskOrderScanCursor);
+ break;
+
+ default:
+ throw new HyracksDataException("Op " + op.toString() + " not supported.");
+ }
+ }
+
+ private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
+ try {
+            while (cursor.hasNext()) {
+ cursor.next();
+ }
+ } finally {
+ cursor.close();
+ }
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorkerFactory.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorkerFactory.java
new file mode 100644
index 0000000..dc4d883
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorkerFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.multithread;
+
+import edu.uci.ics.hyracks.storage.am.common.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+
+public class BTreeTestWorkerFactory implements ITreeIndexTestWorkerFactory {
+ @Override
+ public AbstractTreeIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
+ ITreeIndex index, int numBatches) {
+ return new BTreeTestWorker(dataGen, opSelector, index, numBatches);
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java
index 9630a1b..f4eca1b 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/AbstractBTreeTest.java
@@ -1,76 +1,46 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.btree.util;
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Random;
import java.util.logging.Logger;
import org.junit.After;
import org.junit.Before;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
public abstract class AbstractBTreeTest {
- protected static final Logger LOGGER = Logger.getLogger(AbstractBTreeTest.class.getName());
- public static final long RANDOM_SEED = 50;
-
- private static final int PAGE_SIZE = 256;
- private static final int NUM_PAGES = 10;
- private static final int MAX_OPEN_FILES = 10;
- private static final int HYRACKS_FRAME_SIZE = 128;
-
- protected IHyracksTaskContext ctx;
- protected IBufferCache bufferCache;
- protected int btreeFileId;
-
- protected final Random rnd = new Random();
- protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
- protected final static String tmpDir = System.getProperty("java.io.tmpdir");
- protected final static String sep = System.getProperty("file.separator");
- protected String fileName;
-
- @Before
- public void setUp() throws HyracksDataException {
- fileName = tmpDir + sep + simpleDateFormat.format(new Date());
- ctx = TestUtils.create(getHyracksFrameSize());
- TestStorageManagerComponentHolder.init(getPageSize(), getNumPages(), getMaxOpenFiles());
- bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- btreeFileId = fmp.lookupFileId(file);
- bufferCache.openFile(btreeFileId);
- rnd.setSeed(RANDOM_SEED);
+ protected final Logger LOGGER = Logger.getLogger(BTreeTestHarness.class.getName());
+ protected final BTreeTestHarness harness;
+
+ public AbstractBTreeTest() {
+ harness = new BTreeTestHarness();
}
- @After
+ public AbstractBTreeTest(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
+ harness = new BTreeTestHarness(pageSize, numPages, maxOpenFiles, hyracksFrameSize);
+ }
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @After
public void tearDown() throws HyracksDataException {
- bufferCache.closeFile(btreeFileId);
- bufferCache.close();
- File f = new File(fileName);
- f.deleteOnExit();
- }
-
- public int getPageSize() {
- return PAGE_SIZE;
- }
-
- public int getNumPages() {
- return NUM_PAGES;
- }
-
- public int getHyracksFrameSize() {
- return HYRACKS_FRAME_SIZE;
- }
-
- public int getMaxOpenFiles() {
- return MAX_OPEN_FILES;
+ harness.tearDown();
}
}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java
index f1b03c1..b820f93 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestContext.java
@@ -15,49 +15,43 @@
package edu.uci.ics.hyracks.storage.am.btree.util;
-import java.util.TreeSet;
-
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.storage.am.btree.OrderedIndexTestContext;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
@SuppressWarnings("rawtypes")
-public final class BTreeTestContext {
- public final ISerializerDeserializer[] fieldSerdes;
- public final IBufferCache bufferCache;
- public final BTree btree;
- public final IBTreeLeafFrame leafFrame;
- public final IBTreeInteriorFrame interiorFrame;
- public final ITreeIndexMetaDataFrame metaFrame;
- public final ArrayTupleBuilder tupleBuilder;
- public final ArrayTupleReference tuple = new ArrayTupleReference();
- public final TreeSet<CheckTuple> checkTuples = new TreeSet<CheckTuple>();
- public final ITreeIndexAccessor indexAccessor;
-
- public BTreeTestContext(IBufferCache bufferCache, ISerializerDeserializer[] fieldSerdes, BTree btree,
- IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame,
- ITreeIndexAccessor indexAccessor) {
- this.bufferCache = bufferCache;
- this.fieldSerdes = fieldSerdes;
- this.btree = btree;
- this.leafFrame = leafFrame;
- this.interiorFrame = interiorFrame;
- this.metaFrame = metaFrame;
- this.indexAccessor = indexAccessor;
- this.tupleBuilder = new ArrayTupleBuilder(fieldSerdes.length);
+public class BTreeTestContext extends OrderedIndexTestContext {
+
+ public BTreeTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
+ super(fieldSerdes, treeIndex);
}
- public int getFieldCount() {
- return fieldSerdes.length;
- }
-
+ @Override
public int getKeyFieldCount() {
- return btree.getMultiComparator().getKeyFieldCount();
+ BTree btree = (BTree) treeIndex;
+ return btree.getComparatorFactories().length;
}
-}
\ No newline at end of file
+
+ @Override
+ public IBinaryComparatorFactory[] getComparatorFactories() {
+ BTree btree = (BTree) treeIndex;
+ return btree.getComparatorFactories();
+ }
+
+ public static BTreeTestContext create(IBufferCache bufferCache, int btreeFileId, ISerializerDeserializer[] fieldSerdes, int numKeyFields, BTreeLeafFrameType leafType) throws Exception {
+ ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
+ IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeyFields);
+ BTree btree = BTreeUtils.createBTree(bufferCache, NoOpOperationCallback.INSTANCE, typeTraits, cmpFactories, leafType);
+ btree.create(btreeFileId);
+ btree.open(btreeFileId);
+ BTreeTestContext testCtx = new BTreeTestContext(fieldSerdes, btree);
+ return testCtx;
+ }
+}
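The new factory collapses what the deleted BTreeTestUtils.createBTreeTestContext did by hand: the serdes are converted to type traits and comparator factories, the BTree is created and opened on the given file id, and the context wraps the resulting index. A minimal usage sketch, assuming a harness like the one introduced below and the integer serde used throughout these tests:

    ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
            IntegerSerializerDeserializer.INSTANCE };
    // One key field, one payload field, regular NSM leaf frames.
    BTreeTestContext testCtx = BTreeTestContext.create(harness.getBufferCache(),
            harness.getBTreeFileId(), fieldSerdes, 1, BTreeLeafFrameType.REGULAR_NSM);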
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestHarness.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestHarness.java
new file mode 100644
index 0000000..1b450d8
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestHarness.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree.util;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Random;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.common.api.IOperationCallback;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class BTreeTestHarness {
+ public static final BTreeLeafFrameType[] LEAF_FRAMES_TO_TEST = new BTreeLeafFrameType[] {
+ BTreeLeafFrameType.REGULAR_NSM, BTreeLeafFrameType.FIELD_PREFIX_COMPRESSED_NSM };
+
+ private static final long RANDOM_SEED = 50;
+ private static final int DEFAULT_PAGE_SIZE = 256;
+ private static final int DEFAULT_NUM_PAGES = 100;
+ private static final int DEFAULT_MAX_OPEN_FILES = 10;
+ private static final int DEFAULT_HYRACKS_FRAME_SIZE = 128;
+
+ protected final int pageSize;
+ protected final int numPages;
+ protected final int maxOpenFiles;
+ protected final int hyracksFrameSize;
+
+ protected IHyracksTaskContext ctx;
+ protected IBufferCache bufferCache;
+ protected int btreeFileId;
+
+ protected final Random rnd = new Random();
+ protected final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+ protected final String tmpDir = System.getProperty("java.io.tmpdir");
+ protected final String sep = System.getProperty("file.separator");
+ protected String fileName;
+
+ public BTreeTestHarness() {
+ this.pageSize = DEFAULT_PAGE_SIZE;
+ this.numPages = DEFAULT_NUM_PAGES;
+ this.maxOpenFiles = DEFAULT_MAX_OPEN_FILES;
+ this.hyracksFrameSize = DEFAULT_HYRACKS_FRAME_SIZE;
+ }
+
+ public BTreeTestHarness(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
+ this.pageSize = pageSize;
+ this.numPages = numPages;
+ this.maxOpenFiles = maxOpenFiles;
+ this.hyracksFrameSize = hyracksFrameSize;
+ }
+
+ public void setUp() throws HyracksDataException {
+ fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+ ctx = TestUtils.create(getHyracksFrameSize());
+ TestStorageManagerComponentHolder.init(pageSize, numPages, maxOpenFiles);
+ bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ btreeFileId = fmp.lookupFileId(file);
+ bufferCache.openFile(btreeFileId);
+ rnd.setSeed(RANDOM_SEED);
+ }
+
+ public void tearDown() throws HyracksDataException {
+ bufferCache.closeFile(btreeFileId);
+ bufferCache.close();
+ File f = new File(fileName);
+ f.deleteOnExit();
+ }
+
+ public IHyracksTaskContext getHyracksTaskContext() {
+ return ctx;
+ }
+
+ public IBufferCache getBufferCache() {
+ return bufferCache;
+ }
+
+ public int getBTreeFileId() {
+ return btreeFileId;
+ }
+
+ public String getFileName() {
+ return fileName;
+ }
+
+ public Random getRandom() {
+ return rnd;
+ }
+
+ public int getPageSize() {
+ return pageSize;
+ }
+
+ public int getNumPages() {
+ return numPages;
+ }
+
+ public int getHyracksFrameSize() {
+ return hyracksFrameSize;
+ }
+
+ public int getMaxOpenFiles() {
+ return maxOpenFiles;
+ }
+
+ public IOperationCallback getOpCallback() {
+ return NoOpOperationCallback.INSTANCE;
+ }
+}
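Outside of the @Before/@After plumbing shown in the test classes above, the harness lifecycle is just setUp/tearDown around the code under test; the four-argument constructor mirrors the one FieldPrefixNSMTest uses to request a non-default page size. A hedged usage sketch (the parameter values are illustrative):

    // pageSize, numPages, maxOpenFiles, hyracksFrameSize
    BTreeTestHarness harness = new BTreeTestHarness(32768, 40, 10, 128);
    harness.setUp();            // temp file, buffer cache, and open file id
    try {
        IBufferCache bufferCache = harness.getBufferCache();
        int btreeFileId = harness.getBTreeFileId();
        // ... exercise a BTree bound to btreeFileId ...
    } finally {
        harness.tearDown();     // close file and cache; temp file deleted on exit
    }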
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestUtils.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestUtils.java
deleted file mode 100644
index b5186b0..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/BTreeTestUtils.java
+++ /dev/null
@@ -1,505 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.btree.util;
-
-import static org.junit.Assert.fail;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.util.Iterator;
-import java.util.NavigableSet;
-import java.util.Random;
-import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
-import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.PageAllocationException;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-
-@SuppressWarnings("rawtypes")
-public class BTreeTestUtils {
- private static final Logger LOGGER = Logger.getLogger(BTreeTestUtils.class.getName());
-
- public static BTreeTestContext createBTreeTestContext(IBufferCache bufferCache, int btreeFileId, ISerializerDeserializer[] fieldSerdes, int numKeyFields, BTreeLeafFrameType leafType) throws Exception {
- ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, fieldSerdes.length);
- IBinaryComparator[] cmps = SerdeUtils.serdesToComparators(fieldSerdes, numKeyFields);
-
- BTree btree = BTreeUtils.createBTree(bufferCache, btreeFileId, typeTraits, cmps, leafType);
- btree.create(btreeFileId);
- btree.open(btreeFileId);
- ITreeIndexAccessor indexAccessor = btree.createAccessor();
-
- IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) btree.getLeafFrameFactory().createFrame();
- IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) btree.getInteriorFrameFactory().createFrame();
- ITreeIndexMetaDataFrame metaFrame = btree.getFreePageManager().getMetaDataFrameFactory().createFrame();
- BTreeTestContext testCtx = new BTreeTestContext(bufferCache, fieldSerdes, btree, leafFrame, interiorFrame, metaFrame, indexAccessor);
- return testCtx;
- }
-
- private static void compareActualAndExpected(ITupleReference actual, CheckTuple expected, ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
- for (int i = 0; i < fieldSerdes.length; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(
- actual.getFieldData(i), actual.getFieldStart(i),
- actual.getFieldLength(i));
- DataInput dataIn = new DataInputStream(inStream);
- Object actualObj = fieldSerdes[i].deserialize(dataIn);
- if (!actualObj.equals(expected.get(i))) {
- fail("Actual and expected fields do not match.\nExpected: " + expected.get(i) + "\nActual : " + actualObj);
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- private static CheckTuple createCheckTupleFromTuple(ITupleReference tuple, ISerializerDeserializer[] fieldSerdes, int numKeys) throws HyracksDataException {
- CheckTuple checkTuple = new CheckTuple(fieldSerdes.length, numKeys);
- int fieldCount = Math.min(fieldSerdes.length, tuple.getFieldCount());
- for (int i = 0; i < fieldCount; i++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(
- tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
- DataInput dataIn = new DataInputStream(inStream);
- Comparable fieldObj = (Comparable)fieldSerdes[i].deserialize(dataIn);
- checkTuple.add(fieldObj);
- }
- return checkTuple;
- }
-
- @SuppressWarnings("unchecked")
- private static void createTupleFromCheckTuple(CheckTuple checkTuple, ArrayTupleBuilder tupleBuilder, ArrayTupleReference tuple, ISerializerDeserializer[] fieldSerdes) throws HyracksDataException {
- int fieldCount = tupleBuilder.getFieldEndOffsets().length;
- DataOutput dos = tupleBuilder.getDataOutput();
- tupleBuilder.reset();
- for (int i = 0; i < fieldCount; i++) {
- fieldSerdes[i].serialize(checkTuple.get(i), dos);
- tupleBuilder.addFieldEndOffset();
- }
- tuple.reset(tupleBuilder.getFieldEndOffsets(), tupleBuilder.getByteArray());
- }
-
- public static void checkOrderedScan(BTreeTestContext testCtx) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Testing Ordered Scan.");
- }
- ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(testCtx.leafFrame, false);
- RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
- testCtx.indexAccessor.search(scanCursor, nullPred);
- Iterator<CheckTuple> checkIter = testCtx.checkTuples.iterator();
- int actualCount = 0;
- try {
- while (scanCursor.hasNext()) {
- if (!checkIter.hasNext()) {
- fail("Ordered scan returned more answers than expected.\nExpected: " + testCtx.checkTuples.size());
- }
- scanCursor.next();
- CheckTuple expectedTuple = checkIter.next();
- ITupleReference tuple = scanCursor.getTuple();
- compareActualAndExpected(tuple, expectedTuple, testCtx.fieldSerdes);
- actualCount++;
- }
- if (actualCount < testCtx.checkTuples.size()) {
- fail("Ordered scan returned fewer answers than expected.\nExpected: " + testCtx.checkTuples.size() + "\nActual : " + actualCount);
- }
- } finally {
- scanCursor.close();
- }
- }
-
- public static void checkDiskOrderScan(BTreeTestContext testCtx) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Testing Disk-Order Scan.");
- }
- ITreeIndexCursor diskOrderCursor = new TreeDiskOrderScanCursor(testCtx.leafFrame);
- testCtx.indexAccessor.diskOrderScan(diskOrderCursor);
- int actualCount = 0;
- try {
- while (diskOrderCursor.hasNext()) {
- diskOrderCursor.next();
- ITupleReference tuple = diskOrderCursor.getTuple();
- CheckTuple checkTuple = createCheckTupleFromTuple(tuple, testCtx.fieldSerdes, testCtx.btree.getMultiComparator().getKeyFieldCount());
- if (!testCtx.checkTuples.contains(checkTuple)) {
- fail("Disk-order scan returned unexpected answer: " + checkTuple.toString());
- }
- actualCount++;
- }
- if (actualCount < testCtx.checkTuples.size()) {
- fail("Disk-order scan returned fewer answers than expected.\nExpected: " + testCtx.checkTuples.size() + "\nActual : " + actualCount);
- }
- if (actualCount > testCtx.checkTuples.size()) {
- fail("Disk-order scan returned more answers than expected.\nExpected: " + testCtx.checkTuples.size() + "\nActual : " + actualCount);
- }
- } finally {
- diskOrderCursor.close();
- }
- }
-
- public static void checkRangeSearch(BTreeTestContext testCtx, ITupleReference lowKey, ITupleReference highKey, boolean lowKeyInclusive, boolean highKeyInclusive) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Testing Range Search.");
- }
- MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), lowKey);
- MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), highKey);
- ITreeIndexCursor searchCursor = new BTreeRangeSearchCursor(testCtx.leafFrame, false);
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, lowKeyInclusive, highKeyInclusive, lowKeyCmp, highKeyCmp);
- testCtx.indexAccessor.search(searchCursor, rangePred);
- // Get the subset of elements from the expected set within given key range.
- CheckTuple lowKeyCheck = createCheckTupleFromTuple(lowKey, testCtx.fieldSerdes, lowKeyCmp.getKeyFieldCount());
- CheckTuple highKeyCheck = createCheckTupleFromTuple(highKey, testCtx.fieldSerdes, highKeyCmp.getKeyFieldCount());
- NavigableSet<CheckTuple> expectedSubset = null;
- if (lowKeyCmp.getKeyFieldCount() < testCtx.btree.getMultiComparator().getKeyFieldCount() ||
- highKeyCmp.getKeyFieldCount() < testCtx.btree.getMultiComparator().getKeyFieldCount()) {
- // Searching on a key prefix (low key or high key or both).
- expectedSubset = getPrefixExpectedSubset(testCtx.checkTuples, lowKeyCheck, highKeyCheck);
- } else {
- // Searching on all key fields.
- expectedSubset = testCtx.checkTuples.subSet(lowKeyCheck, lowKeyInclusive, highKeyCheck, highKeyInclusive);
- }
- Iterator<CheckTuple> checkIter = expectedSubset.iterator();
- int actualCount = 0;
- try {
- while (searchCursor.hasNext()) {
- if (!checkIter.hasNext()) {
- fail("Range search returned more answers than expected.\nExpected: " + expectedSubset.size());
- }
- searchCursor.next();
- CheckTuple expectedTuple = checkIter.next();
- ITupleReference tuple = searchCursor.getTuple();
- compareActualAndExpected(tuple, expectedTuple, testCtx.fieldSerdes);
- actualCount++;
- }
- if (actualCount < expectedSubset.size()) {
- fail("Range search returned fewer answers than expected.\nExpected: " + expectedSubset.size() + "\nActual : " + actualCount);
- }
- } finally {
- searchCursor.close();
- }
- }
-
- public static void checkPointSearches(BTreeTestContext testCtx) throws Exception {
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("Testing Point Searches On All Expected Keys.");
- }
- ITreeIndexCursor searchCursor = new BTreeRangeSearchCursor(testCtx.leafFrame, false);
-
- ArrayTupleBuilder lowKeyBuilder = new ArrayTupleBuilder(testCtx.btree.getMultiComparator().getKeyFieldCount());
- ArrayTupleReference lowKey = new ArrayTupleReference();
- ArrayTupleBuilder highKeyBuilder = new ArrayTupleBuilder(testCtx.btree.getMultiComparator().getKeyFieldCount());
- ArrayTupleReference highKey = new ArrayTupleReference();
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, null, null);
-
- // Iterate through expected tuples, and perform a point search in the BTree to verify the tuple can be reached.
- for (CheckTuple checkTuple : testCtx.checkTuples) {
- createTupleFromCheckTuple(checkTuple, lowKeyBuilder, lowKey, testCtx.fieldSerdes);
- createTupleFromCheckTuple(checkTuple, highKeyBuilder, highKey, testCtx.fieldSerdes);
- MultiComparator lowKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), lowKey);
- MultiComparator highKeyCmp = BTreeUtils.getSearchMultiComparator(testCtx.btree.getMultiComparator(), highKey);
-
- rangePred.setLowKey(lowKey, true);
- rangePred.setHighKey(highKey, true);
- rangePred.setLowKeyComparator(lowKeyCmp);
- rangePred.setHighKeyComparator(highKeyCmp);
-
- testCtx.indexAccessor.search(searchCursor, rangePred);
-
- try {
- // We expect exactly one answer.
- if (searchCursor.hasNext()) {
- searchCursor.next();
- ITupleReference tuple = searchCursor.getTuple();
- compareActualAndExpected(tuple, checkTuple, testCtx.fieldSerdes);
- }
- if (searchCursor.hasNext()) {
- fail("Point search returned more than one answer.");
- }
- } finally {
- searchCursor.close();
- }
- }
- }
-
- @SuppressWarnings("unchecked")
- // Create a new TreeSet containing the elements satisfying the prefix search.
- // Implementing prefix search by changing compareTo() in CheckTuple does not work.
- public static TreeSet<CheckTuple> getPrefixExpectedSubset(TreeSet<CheckTuple> checkTuples, CheckTuple lowKey, CheckTuple highKey) {
- TreeSet<CheckTuple> expectedSubset = new TreeSet<CheckTuple>();
- Iterator<CheckTuple> iter = checkTuples.iterator();
- while(iter.hasNext()) {
- CheckTuple t = iter.next();
- boolean geLowKey = true;
- boolean leHighKey = true;
- for (int i = 0; i < lowKey.getNumKeys(); i++) {
- if (t.get(i).compareTo(lowKey.get(i)) < 0) {
- geLowKey = false;
- break;
- }
- }
- for (int i = 0; i < highKey.getNumKeys(); i++) {
- if (t.get(i).compareTo(highKey.get(i)) > 0) {
- leHighKey = false;
- break;
- }
- }
- if (geLowKey && leHighKey) {
- expectedSubset.add(t);
- }
- }
- return expectedSubset;
- }
-
- public static void insertIntTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
- int fieldCount = testCtx.getFieldCount();
- int numKeyFields = testCtx.getKeyFieldCount();
- int[] tupleValues = new int[testCtx.getFieldCount()];
- // Scale range of values according to number of keys.
- // For example, for 2 keys we want the square root of numTuples, for 3 keys the cube root of numTuples, etc.
- int maxValue = (int)Math.ceil(Math.pow(numTuples, 1.0/(double)numKeyFields));
- for (int i = 0; i < numTuples; i++) {
- // Set keys.
- for (int j = 0; j < numKeyFields; j++) {
- tupleValues[j] = rnd.nextInt() % maxValue;
- }
- // Set values.
- for (int j = numKeyFields; j < fieldCount; j++) {
- tupleValues[j] = j;
- }
- TupleUtils.createIntegerTuple(testCtx.tupleBuilder, testCtx.tuple, tupleValues);
- if (LOGGER.isLoggable(Level.INFO)) {
- if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
- LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
- }
- }
- try {
- testCtx.indexAccessor.insert(testCtx.tuple);
- // Set expected values. Do this only after insertion succeeds because we ignore duplicate keys.
- CheckTuple<Integer> checkTuple = new CheckTuple<Integer>(fieldCount, numKeyFields);
- for(int v : tupleValues) {
- checkTuple.add(v);
- }
- testCtx.checkTuples.add(checkTuple);
- } catch (BTreeDuplicateKeyException e) {
- // Ignore duplicate key insertions.
- }
- }
- }
-
- public static void insertStringTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
- int fieldCount = testCtx.getFieldCount();
- int numKeyFields = testCtx.getKeyFieldCount();
- Object[] tupleValues = new Object[fieldCount];
- for (int i = 0; i < numTuples; i++) {
- if (LOGGER.isLoggable(Level.INFO)) {
- if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
- LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
- }
- }
- // Set keys.
- for (int j = 0; j < numKeyFields; j++) {
- int length = (Math.abs(rnd.nextInt()) % 10) + 1;
- tupleValues[j] = getRandomString(length, rnd);
- }
- // Set values.
- for (int j = numKeyFields; j < fieldCount; j++) {
- tupleValues[j] = getRandomString(5, rnd);
- }
- TupleUtils.createTuple(testCtx.tupleBuilder, testCtx.tuple, testCtx.fieldSerdes, tupleValues);
- try {
- testCtx.indexAccessor.insert(testCtx.tuple);
- // Set expected values. Do this only after insertion succeeds because we ignore duplicate keys.
- CheckTuple<String> checkTuple = new CheckTuple<String>(fieldCount, numKeyFields);
- for(Object v : tupleValues) {
- checkTuple.add((String)v);
- }
- testCtx.checkTuples.add(checkTuple);
- } catch (BTreeDuplicateKeyException e) {
- // Ignore duplicate key insertions.
- }
- }
- }
-
- public static void bulkLoadIntTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
- int fieldCount = testCtx.getFieldCount();
- int numKeyFields = testCtx.getKeyFieldCount();
- int[] tupleValues = new int[testCtx.getFieldCount()];
- int maxValue = (int)Math.ceil(Math.pow(numTuples, 1.0/(double)numKeyFields));
- for (int i = 0; i < numTuples; i++) {
- // Set keys.
- for (int j = 0; j < numKeyFields; j++) {
- tupleValues[j] = rnd.nextInt() % maxValue;
- }
- // Set values.
- for (int j = numKeyFields; j < fieldCount; j++) {
- tupleValues[j] = j;
- }
-
- // Set expected values. We also use these as the pre-sorted stream for bulk loading.
- CheckTuple<Integer> checkTuple = new CheckTuple<Integer>(fieldCount, numKeyFields);
- for(int v : tupleValues) {
- checkTuple.add(v);
- }
- testCtx.checkTuples.add(checkTuple);
- }
-
- bulkLoadCheckTuples(testCtx, numTuples);
- }
-
- public static void bulkLoadStringTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
- int fieldCount = testCtx.getFieldCount();
- int numKeyFields = testCtx.getKeyFieldCount();
- String[] tupleValues = new String[fieldCount];
- for (int i = 0; i < numTuples; i++) {
- // Set keys.
- for (int j = 0; j < numKeyFields; j++) {
- int length = (Math.abs(rnd.nextInt()) % 10) + 1;
- tupleValues[j] = getRandomString(length, rnd);
- }
- // Set values.
- for (int j = numKeyFields; j < fieldCount; j++) {
- tupleValues[j] = getRandomString(5, rnd);
- }
- // Set expected values. We also use these as the pre-sorted stream for bulk loading.
- CheckTuple<String> checkTuple = new CheckTuple<String>(fieldCount, numKeyFields);
- for(String v : tupleValues) {
- checkTuple.add(v);
- }
- testCtx.checkTuples.add(checkTuple);
- }
-
- bulkLoadCheckTuples(testCtx, numTuples);
- }
-
- private static void bulkLoadCheckTuples(BTreeTestContext testCtx, int numTuples) throws HyracksDataException, TreeIndexException, PageAllocationException {
- int fieldCount = testCtx.getFieldCount();
- ArrayTupleBuilder tupleBuilder = new ArrayTupleBuilder(fieldCount);
- ArrayTupleReference tuple = new ArrayTupleReference();
- // Perform bulk load.
- IIndexBulkLoadContext bulkLoadCtx = testCtx.btree.beginBulkLoad(0.7f);
- int c = 1;
- for (CheckTuple checkTuple : testCtx.checkTuples) {
- if (LOGGER.isLoggable(Level.INFO)) {
- if (c % (numTuples / 10) == 0) {
- LOGGER.info("Bulk Loading Tuple " + c + "/" + numTuples);
- }
- }
- createTupleFromCheckTuple(checkTuple, tupleBuilder, tuple, testCtx.fieldSerdes);
- testCtx.btree.bulkLoadAddTuple(tuple, bulkLoadCtx);
- c++;
- }
- testCtx.btree.endBulkLoad(bulkLoadCtx);
- }
-
- public static void deleteTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
- ArrayTupleBuilder deleteTupleBuilder = new ArrayTupleBuilder(testCtx.btree.getMultiComparator().getKeyFieldCount());
- ArrayTupleReference deleteTuple = new ArrayTupleReference();
- int numCheckTuples = testCtx.checkTuples.size();
- // Copy CheckTuple references into array, so we can randomly pick from there.
- CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
- int idx = 0;
- for (CheckTuple checkTuple : testCtx.checkTuples) {
- checkTuples[idx++] = checkTuple;
- }
- for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
- if (LOGGER.isLoggable(Level.INFO)) {
- if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
- LOGGER.info("Deleting Tuple " + (i + 1) + "/" + numTuples);
- }
- }
- int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
- CheckTuple checkTuple = checkTuples[checkTupleIdx];
- createTupleFromCheckTuple(checkTuple, deleteTupleBuilder, deleteTuple, testCtx.fieldSerdes);
- testCtx.indexAccessor.delete(deleteTuple);
-
- // Remove check tuple from expected results.
- testCtx.checkTuples.remove(checkTuple);
-
- // Swap with last "valid" CheckTuple.
- CheckTuple tmp = checkTuples[numCheckTuples - 1];
- checkTuples[numCheckTuples - 1] = checkTuple;
- checkTuples[checkTupleIdx] = tmp;
- numCheckTuples--;
- }
- }
-
- @SuppressWarnings("unchecked")
- public static void updateTuples(BTreeTestContext testCtx, int numTuples, Random rnd) throws Exception {
- int fieldCount = testCtx.btree.getFieldCount();
- int keyFieldCount = testCtx.btree.getMultiComparator().getKeyFieldCount();
- // This is a noop because we can only update non-key fields.
- if (fieldCount == keyFieldCount) {
- return;
- }
- ArrayTupleBuilder updateTupleBuilder = new ArrayTupleBuilder(fieldCount);
- ArrayTupleReference updateTuple = new ArrayTupleReference();
- int numCheckTuples = testCtx.checkTuples.size();
- // Copy CheckTuple references into array, so we can randomly pick from there.
- CheckTuple[] checkTuples = new CheckTuple[numCheckTuples];
- int idx = 0;
- for (CheckTuple checkTuple : testCtx.checkTuples) {
- checkTuples[idx++] = checkTuple;
- }
- for (int i = 0; i < numTuples && numCheckTuples > 0; i++) {
- if (LOGGER.isLoggable(Level.INFO)) {
- if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
- LOGGER.info("Updating Tuple " + (i + 1) + "/" + numTuples);
- }
- }
- int checkTupleIdx = Math.abs(rnd.nextInt() % numCheckTuples);
- CheckTuple checkTuple = checkTuples[checkTupleIdx];
- // Update check tuple's non-key fields.
- for (int j = keyFieldCount; j < fieldCount; j++) {
- Comparable newValue = getRandomUpdateValue(testCtx.fieldSerdes[j], rnd);
- checkTuple.set(j, newValue);
- }
-
- createTupleFromCheckTuple(checkTuple, updateTupleBuilder, updateTuple, testCtx.fieldSerdes);
- testCtx.indexAccessor.update(updateTuple);
-
- // Swap with last "valid" CheckTuple.
- CheckTuple tmp = checkTuples[numCheckTuples - 1];
- checkTuples[numCheckTuples - 1] = checkTuple;
- checkTuples[checkTupleIdx] = tmp;
- numCheckTuples--;
- }
- }
-
- private static Comparable getRandomUpdateValue(ISerializerDeserializer serde, Random rnd) {
- if (serde instanceof IntegerSerializerDeserializer) {
- return Integer.valueOf(rnd.nextInt());
- } else if (serde instanceof UTF8StringSerializerDeserializer) {
- return getRandomString(10, rnd);
- }
- return null;
- }
-
- public static String getRandomString(int length, Random rnd) {
- String s = Long.toHexString(Double.doubleToLongBits(rnd.nextDouble()));
- StringBuilder strBuilder = new StringBuilder();
- for (int i = 0; i < s.length() && i < length; i++) {
- strBuilder.append(s.charAt(Math.abs(rnd.nextInt()) % s.length()));
- }
- return strBuilder.toString();
- }
-}
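A quick illustration of the key-range scaling used by the deleted insertIntTuples() and bulkLoadIntTuples() helpers: maxValue = ceil(numTuples^(1/numKeyFields)) is chosen so that the number of distinct key combinations, maxValue^numKeyFields, is roughly numTuples, and because rnd.nextInt() % maxValue can also be negative the effective key range is about twice that wide. A minimal standalone sketch (illustrative class name, no Hyracks dependencies):

import java.util.Random;

// Illustrative only: reproduces the key-range scaling used by the deleted
// insertIntTuples()/bulkLoadIntTuples() helpers above.
public class KeyScalingSketch {
    public static void main(String[] args) {
        int numTuples = 10000;
        Random rnd = new Random(50);
        for (int numKeyFields = 1; numKeyFields <= 3; numKeyFields++) {
            // For k key fields, take the k-th root so that maxValue^k is roughly numTuples.
            int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
            // Keys are then drawn as rnd.nextInt() % maxValue, i.e. from (-maxValue, maxValue).
            int sampleKey = rnd.nextInt() % maxValue;
            System.out.println(numKeyFields + " key field(s): maxValue = " + maxValue
                    + ", sample key = " + sampleKey);
        }
    }
}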
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/CheckTuple.java b/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/CheckTuple.java
deleted file mode 100644
index f945ab9..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/util/CheckTuple.java
+++ /dev/null
@@ -1,73 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.btree.util;
-
-@SuppressWarnings({"rawtypes", "unchecked"})
-public class CheckTuple<T extends Comparable<T>> implements Comparable<T> {
- private final int numKeys;
- private final Comparable[] tuple;
- private int pos;
-
- public CheckTuple(int numFields, int numKeys) {
- this.numKeys = numKeys;
- this.tuple = new Comparable[numFields];
- pos = 0;
- }
-
- public void add(T e) {
- tuple[pos++] = e;
- }
-
- @Override
- public int compareTo(T o) {
- CheckTuple<T> other = (CheckTuple<T>)o;
- for (int i = 0; i < numKeys; i++) {
- int cmp = tuple[i].compareTo(other.get(i));
- if (cmp != 0) {
- return cmp;
- }
- }
- return 0;
- }
-
- public T get(int idx) {
- return (T)tuple[idx];
- }
-
- public void set(int idx, T e) {
- tuple[idx] = e;
- }
-
- public int size() {
- return tuple.length;
- }
-
- public int getNumKeys() {
- return numKeys;
- }
-
- @Override
- public String toString() {
- StringBuilder strBuilder = new StringBuilder();
- for (int i = 0; i < tuple.length; i++) {
- strBuilder.append(tuple[i].toString());
- if (i != tuple.length-1) {
- strBuilder.append(" ");
- }
- }
- return strBuilder.toString();
- }
-}
\ No newline at end of file
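Note that the deleted CheckTuple orders instances by the first numKeys fields only, so the expected-results TreeSet<CheckTuple> collapses tuples that agree on the keys even when their payload fields differ, matching the primary-key semantics of the index under test (duplicate-key insertions are likewise skipped via BTreeDuplicateKeyException above). A simplified, self-contained stand-in (not the project class) showing that behaviour:

import java.util.TreeSet;

// Simplified stand-in for the deleted CheckTuple: ordering uses key fields only.
@SuppressWarnings({ "rawtypes", "unchecked" })
public class KeyOnlyTuple implements Comparable<KeyOnlyTuple> {
    private final int numKeys;
    private final Comparable[] fields;

    public KeyOnlyTuple(int numKeys, Comparable... fields) {
        this.numKeys = numKeys;
        this.fields = fields;
    }

    @Override
    public int compareTo(KeyOnlyTuple other) {
        for (int i = 0; i < numKeys; i++) {
            int cmp = fields[i].compareTo(other.fields[i]);
            if (cmp != 0) {
                return cmp;
            }
        }
        return 0; // payload fields are ignored, mirroring CheckTuple
    }

    public static void main(String[] args) {
        TreeSet<KeyOnlyTuple> set = new TreeSet<KeyOnlyTuple>();
        set.add(new KeyOnlyTuple(1, 42, "first payload"));
        set.add(new KeyOnlyTuple(1, 42, "second payload")); // same key, so the add is rejected
        System.out.println(set.size()); // prints 1
    }
}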
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
index 963903a..5bf44d3 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
@@ -27,12 +27,6 @@
<dependencies>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-control-nc</artifactId>
- <version>0.2.1-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-invertedindex</artifactId>
<version>0.2.1-SNAPSHOT</version>
<type>jar</type>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
index 7b6bd9f..e086af6 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/AbstractInvIndexSearchTest.java
@@ -15,18 +15,15 @@
package edu.uci.ics.hyracks.storage.am.invertedindex;
-import java.io.DataOutput;
import java.io.File;
-import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Random;
import org.junit.After;
import org.junit.Before;
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
@@ -36,26 +33,25 @@
import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
import edu.uci.ics.hyracks.data.std.primitive.UTF8StringPointable;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexResultCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
-import edu.uci.ics.hyracks.storage.am.invertedindex.impls.TOccurrenceSearcher;
import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.IBinaryTokenizer;
import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.ITokenFactory;
import edu.uci.ics.hyracks.storage.am.invertedindex.util.InvertedIndexUtils;
@@ -86,8 +82,7 @@
// declare btree keys
protected int btreeKeyFieldCount = 1;
- protected IBinaryComparator[] btreeBinCmps = new IBinaryComparator[btreeKeyFieldCount];
- protected MultiComparator btreeCmp = new MultiComparator(btreeBinCmps);
+ protected IBinaryComparatorFactory[] btreeCmpFactories = new IBinaryComparatorFactory[btreeKeyFieldCount];
// btree frame factories
protected TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(btreeTypeTraits);
@@ -112,48 +107,38 @@
protected ITypeTraits[] invListTypeTraits = new ITypeTraits[invListFields];
protected int invListKeys = 1;
- protected IBinaryComparator[] invListBinCmps = new IBinaryComparator[invListKeys];
- protected MultiComparator invListCmp = new MultiComparator(invListBinCmps);
+ protected IBinaryComparatorFactory[] invListCmpFactories = new IBinaryComparatorFactory[invListKeys];
protected InvertedIndex invIndex;
protected Random rnd = new Random();
- protected ByteBuffer frame = taskCtx.allocateFrame();
- protected FrameTupleAppender appender = new FrameTupleAppender(taskCtx.getFrameSize());
protected ArrayTupleBuilder tb = new ArrayTupleBuilder(2);
- protected DataOutput dos = tb.getDataOutput();
+ protected ArrayTupleReference tuple = new ArrayTupleReference();
protected ISerializerDeserializer[] insertSerde = { UTF8StringSerializerDeserializer.INSTANCE,
IntegerSerializerDeserializer.INSTANCE };
protected RecordDescriptor insertRecDesc = new RecordDescriptor(insertSerde);
- protected IFrameTupleAccessor accessor = new FrameTupleAccessor(taskCtx.getFrameSize(), insertRecDesc);
-
- protected FrameTupleReference tuple = new FrameTupleReference();
protected ArrayList<ArrayList<Integer>> checkInvLists = new ArrayList<ArrayList<Integer>>();
protected int maxId = 1000000;
- // protected int maxId = 1000;
protected int[] scanCountArray = new int[maxId];
protected ArrayList<Integer> expectedResults = new ArrayList<Integer>();
protected ISerializerDeserializer[] querySerde = { UTF8StringSerializerDeserializer.INSTANCE };
protected RecordDescriptor queryRecDesc = new RecordDescriptor(querySerde);
- protected FrameTupleAppender queryAppender = new FrameTupleAppender(taskCtx.getFrameSize());
protected ArrayTupleBuilder queryTb = new ArrayTupleBuilder(querySerde.length);
- protected DataOutput queryDos = queryTb.getDataOutput();
-
- protected IFrameTupleAccessor queryAccessor = new FrameTupleAccessor(taskCtx.getFrameSize(), queryRecDesc);
- protected FrameTupleReference queryTuple = new FrameTupleReference();
+ protected ArrayTupleReference queryTuple = new ArrayTupleReference();
protected ITokenFactory tokenFactory;
protected IBinaryTokenizer tokenizer;
- protected TOccurrenceSearcher searcher;
- protected IInvertedIndexResultCursor resultCursor;
+ protected IIndexCursor resultCursor;
+ protected abstract void setTokenizer();
+
/**
* Initialize members, generate data, and bulk load the inverted index.
*/
@@ -169,31 +154,31 @@
btreeFileId = fmp.lookupFileId(btreeFile);
bufferCache.openFile(btreeFileId);
- btreeBinCmps[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
+ btreeCmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
- freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
+ freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
- btree = new BTree(bufferCache, btreeTypeTraits.length, btreeCmp, freePageManager, interiorFrameFactory,
- leafFrameFactory);
+ btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, btreeTypeTraits.length, btreeCmpFactories,
+ freePageManager, interiorFrameFactory, leafFrameFactory);
btree.create(btreeFileId);
btree.open(btreeFileId);
// --- INVERTED INDEX ---
+ setTokenizer();
+
bufferCache.createFile(invListsFile);
invListsFileId = fmp.lookupFileId(invListsFile);
bufferCache.openFile(invListsFileId);
invListTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
- invListBinCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+ invListCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
- invIndex = new InvertedIndex(bufferCache, btree, invListTypeTraits, invListCmp);
+ IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
+ invIndex = new InvertedIndex(bufferCache, btree, invListTypeTraits, invListCmpFactories, invListBuilder);
invIndex.open(invListsFileId);
rnd.setSeed(50);
-
- accessor.reset(frame);
- queryAccessor.reset(frame);
}
@After
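The hunk above swaps the frame-based tuple plumbing (allocated frame, FrameTupleAppender, FrameTupleAccessor, FrameTupleReference) for the lighter ArrayTupleBuilder/ArrayTupleReference pair. A small sketch of the new build-a-tuple pattern, assuming the 0.2.1-SNAPSHOT APIs exactly as they appear in this patch (error handling and surrounding setup omitted):

import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;

public class ArrayTupleSketch {
    // Builds a (token, id) tuple entirely on the heap; no frame or accessor is needed.
    public static void fillTuple(ArrayTupleBuilder tb, ArrayTupleReference tuple, String token, int id)
            throws HyracksDataException {
        tb.reset();
        UTF8StringSerializerDeserializer.INSTANCE.serialize(token, tb.getDataOutput());
        tb.addFieldEndOffset();
        IntegerSerializerDeserializer.INSTANCE.serialize(id, tb.getDataOutput());
        tb.addFieldEndOffset();
        // Point the reusable reference at the builder's backing array.
        tuple.reset(tb.getFieldEndOffsets(), tb.getByteArray());
    }
}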
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
index 93934ed..9fdc1c4 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/BulkLoadTest.java
@@ -29,7 +29,7 @@
import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
@@ -51,6 +51,7 @@
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
@@ -58,6 +59,7 @@
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.impls.NoOpOperationCallback;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
@@ -109,10 +111,8 @@
// declare btree keys
int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY).createBinaryComparator();
-
- MultiComparator btreeCmp = new MultiComparator(cmps);
+ IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+ cmpFactories[0] = PointableBinaryComparatorFactory.of(UTF8StringPointable.FACTORY);
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(btreeTypeTraits);
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
@@ -121,10 +121,10 @@
ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, btreeFileId, 0, metaFrameFactory);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
- BTree btree = new BTree(bufferCache, btreeTypeTraits.length, btreeCmp, freePageManager, interiorFrameFactory,
- leafFrameFactory);
+ BTree btree = new BTree(bufferCache, NoOpOperationCallback.INSTANCE, btreeTypeTraits.length, cmpFactories,
+ freePageManager, interiorFrameFactory, leafFrameFactory);
btree.create(btreeFileId);
btree.open(btreeFileId);
@@ -133,12 +133,11 @@
invListTypeTraits[0] = IntegerPointable.TYPE_TRAITS;
int invListKeys = 1;
- IBinaryComparator[] invListBinCmps = new IBinaryComparator[invListKeys];
- invListBinCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+ IBinaryComparatorFactory[] invListCmpFactories = new IBinaryComparatorFactory[invListKeys];
+ invListCmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
- MultiComparator invListCmp = new MultiComparator(invListBinCmps);
-
- InvertedIndex invIndex = new InvertedIndex(bufferCache, btree, invListTypeTraits, invListCmp);
+ IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
+ InvertedIndex invIndex = new InvertedIndex(bufferCache, btree, invListTypeTraits, invListCmpFactories, invListBuilder);
invIndex.open(invListsFileId);
Random rnd = new Random();
@@ -176,19 +175,14 @@
int addProb = 0;
int addProbStep = 10;
- IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(invListTypeTraits);
- InvertedIndex.BulkLoadContext ctx = invIndex.beginBulkLoad(invListBuilder, HYRACKS_FRAME_SIZE,
- BTree.DEFAULT_FILL_FACTOR);
+ IIndexBulkLoadContext ctx = invIndex.beginBulkLoad(BTree.DEFAULT_FILL_FACTOR);
- int totalElements = 0;
for (int i = 0; i < tokens.size(); i++) {
addProb += addProbStep * (i + 1);
for (int j = 0; j < maxId; j++) {
if ((Math.abs(rnd.nextInt()) % addProb) == 0) {
- totalElements++;
-
tb.reset();
UTF8StringSerializerDeserializer.INSTANCE.serialize(tokens.get(i), dos);
tb.addFieldEndOffset();
@@ -203,7 +197,7 @@
tuple.reset(accessor, 0);
try {
- invIndex.bulkLoadAddTuple(ctx, tuple);
+ invIndex.bulkLoadAddTuple(tuple, ctx);
} catch (Exception e) {
e.printStackTrace();
}
@@ -216,7 +210,8 @@
ITreeIndexCursor btreeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrame, false);
FrameTupleReference searchKey = new FrameTupleReference();
- RangePredicate btreePred = new RangePredicate(true, searchKey, searchKey, true, true, btreeCmp, btreeCmp);
+ MultiComparator btreeCmp = MultiComparator.create(cmpFactories);
+ RangePredicate btreePred = new RangePredicate(searchKey, searchKey, true, true, btreeCmp, btreeCmp);
IInvertedListCursor invListCursor = new FixedSizeElementInvertedListCursor(bufferCache, invListsFileId,
invListTypeTraits);
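The interesting change in this hunk is the bulk-load API: the inverted-list builder is now passed to the InvertedIndex constructor, so beginBulkLoad() takes only a fill factor and returns the generic IIndexBulkLoadContext, and bulkLoadAddTuple() takes the tuple first. A condensed sketch of the new loading loop under those signatures; the endBulkLoad() call is not visible in this hunk and is assumed to mirror the tree-index bulk-load API:

import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;

public class InvIndexBulkLoadSketch {
    // Tuples must arrive sorted on (token, element), exactly as the tests prepare them.
    public static void load(InvertedIndex invIndex, Iterable<ITupleReference> sortedTuples) throws Exception {
        IIndexBulkLoadContext ctx = invIndex.beginBulkLoad(BTree.DEFAULT_FILL_FACTOR);
        for (ITupleReference tuple : sortedTuples) {
            invIndex.bulkLoadAddTuple(tuple, ctx);
        }
        invIndex.endBulkLoad(ctx); // assumed: not shown in this hunk, mirrors the tree-index API
    }
}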
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
index 1299fcb..33bf0c5 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchPerfTest.java
@@ -28,14 +28,12 @@
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.PageAllocationException;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex.InvertedIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndexSearchPredicate;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.OccurrenceThresholdPanicException;
-import edu.uci.ics.hyracks.storage.am.invertedindex.impls.SearchResultCursor;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.TOccurrenceSearcher;
import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.ConjunctiveSearchModifier;
import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.JaccardSearchModifier;
@@ -55,20 +53,20 @@
protected List<String> tokens = new ArrayList<String>();
- @Before
- public void start() throws Exception {
- super.start();
+ @Override
+ protected void setTokenizer() {
tokenFactory = new UTF8WordTokenFactory();
tokenizer = new DelimitedUTF8StringBinaryTokenizer(true, false,
tokenFactory);
- searcher = new TOccurrenceSearcher(taskCtx, invIndex, tokenizer);
- resultCursor = new SearchResultCursor(
- searcher.createResultFrameTupleAccessor(),
- searcher.createResultTupleReference());
+ }
+
+ @Before
+ public void start() throws Exception {
+ super.start();
loadData();
}
- public void loadData() throws HyracksDataException, TreeIndexException, PageAllocationException {
+ public void loadData() throws HyracksDataException, TreeIndexException {
tokens.add("compilers");
tokens.add("computer");
tokens.add("databases");
@@ -87,37 +85,23 @@
int addProb = 0;
int addProbStep = 10;
- IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(
- invListTypeTraits);
- InvertedIndex.BulkLoadContext ctx = invIndex.beginBulkLoad(
- invListBuilder, HYRACKS_FRAME_SIZE, BTree.DEFAULT_FILL_FACTOR);
+ IIndexBulkLoadContext ctx = invIndex.beginBulkLoad(BTree.DEFAULT_FILL_FACTOR);
- int totalElements = 0;
for (int i = 0; i < tokens.size(); i++) {
addProb += addProbStep * (i + 1);
for (int j = 0; j < maxId; j++) {
if ((Math.abs(rnd.nextInt()) % addProb) == 0) {
-
- totalElements++;
-
tb.reset();
UTF8StringSerializerDeserializer.INSTANCE.serialize(
- tokens.get(i), dos);
+ tokens.get(i), tb.getDataOutput());
tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(j, dos);
+ IntegerSerializerDeserializer.INSTANCE.serialize(j, tb.getDataOutput());
tb.addFieldEndOffset();
-
+ tuple.reset(tb.getFieldEndOffsets(), tb.getByteArray());
checkInvLists.get(i).add(j);
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(),
- 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
try {
- invIndex.bulkLoadAddTuple(ctx, tuple);
+ invIndex.bulkLoadAddTuple(tuple, ctx);
} catch (Exception e) {
e.printStackTrace();
}
@@ -170,6 +154,9 @@
rnd.setSeed(50);
+ InvertedIndexAccessor accessor = (InvertedIndexAccessor) invIndex.createAccessor();
+ InvertedIndexSearchPredicate searchPred = new InvertedIndexSearchPredicate(tokenizer, searchModifier);
+
// generate random queries
int[] queryTokenIndexes = new int[tokens.size()];
for (int i = 0; i < numQueries; i++) {
@@ -182,31 +169,34 @@
StringBuilder strBuilder = new StringBuilder();
for (int j = 0; j < numQueryTokens; j++) {
strBuilder.append(tokens.get(queryTokenIndexes[j]));
- if (j + 1 != numQueryTokens)
+ if (j + 1 != numQueryTokens) {
strBuilder.append(" ");
+ }
}
String queryString = strBuilder.toString();
+ // Serialize query.
queryTb.reset();
UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString,
- queryDos);
+ queryTb.getDataOutput());
queryTb.addFieldEndOffset();
+ queryTuple.reset(queryTb.getFieldEndOffsets(), queryTb.getByteArray());
- queryAppender.reset(frame, true);
- queryAppender.append(queryTb.getFieldEndOffsets(),
- queryTb.getByteArray(), 0, queryTb.getSize());
- queryTuple.reset(queryAccessor, 0);
-
+ // Set query tuple in search predicate.
+ searchPred.setQueryTuple(queryTuple);
+ searchPred.setQueryFieldIndex(0);
+
boolean panic = false;
+ resultCursor = accessor.createSearchCursor();
int repeats = 1;
double totalTime = 0;
for (int j = 0; j < repeats; j++) {
long timeStart = System.currentTimeMillis();
try {
- searcher.reset();
- searcher.search(resultCursor, queryTuple, 0, searchModifier);
+ resultCursor.reset();
+ accessor.search(resultCursor, searchPred);
} catch (OccurrenceThresholdPanicException e) {
panic = true;
}
@@ -220,10 +210,9 @@
}
if (!panic) {
-
+ TOccurrenceSearcher searcher = (TOccurrenceSearcher) accessor.getSearcher();
fillExpectedResults(queryTokenIndexes, numQueryTokens,
searcher.getOccurrenceThreshold());
-
// verify results
int checkIndex = 0;
while (resultCursor.hasNext()) {
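After this change a query boils down to: build the query tuple, point the reusable InvertedIndexSearchPredicate at it, and hand a search cursor to the accessor, catching OccurrenceThresholdPanicException for queries whose tokens are too frequent. A compact sketch built from the accessor and predicate calls introduced in this patch; cursor iteration follows the common IIndexCursor interface, and result verification is trimmed to a count:

import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex.InvertedIndexAccessor;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndexSearchPredicate;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.OccurrenceThresholdPanicException;

public class InvIndexQuerySketch {
    // Runs one query and returns the number of results, or -1 if the searcher panics.
    public static int runQuery(InvertedIndexAccessor accessor, InvertedIndexSearchPredicate searchPred,
            ITupleReference queryTuple) throws Exception {
        searchPred.setQueryTuple(queryTuple);
        searchPred.setQueryFieldIndex(0);
        IIndexCursor resultCursor = accessor.createSearchCursor();
        int count = 0;
        try {
            accessor.search(resultCursor, searchPred);
            while (resultCursor.hasNext()) {
                resultCursor.next();
                count++;
            }
        } catch (OccurrenceThresholdPanicException e) {
            return -1; // the tests simply skip panicked queries
        }
        return count;
    }
}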
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
index eb2b39c..c3c9b99 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
+++ b/hyracks/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/SearchTest.java
@@ -25,20 +25,18 @@
import org.junit.Before;
import org.junit.Test;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ByteArrayAccessibleOutputStream;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.data.std.util.ByteArrayAccessibleOutputStream;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
-import edu.uci.ics.hyracks.storage.am.common.api.PageAllocationException;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexSearchModifier;
-import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.invertedindex.impls.FixedSizeElementInvertedListBuilder;
-import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndex.InvertedIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.InvertedIndexSearchPredicate;
import edu.uci.ics.hyracks.storage.am.invertedindex.impls.OccurrenceThresholdPanicException;
-import edu.uci.ics.hyracks.storage.am.invertedindex.impls.SearchResultCursor;
-import edu.uci.ics.hyracks.storage.am.invertedindex.impls.TOccurrenceSearcher;
import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.ConjunctiveSearchModifier;
import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.EditDistanceSearchModifier;
import edu.uci.ics.hyracks.storage.am.invertedindex.searchmodifiers.JaccardSearchModifier;
@@ -52,16 +50,22 @@
protected List<String> firstNames = new ArrayList<String>();
protected List<String> lastNames = new ArrayList<String>();
- @Before
- public void start() throws Exception {
- super.start();
+ protected IBinaryComparator[] btreeBinCmps;
+
+ @Override
+ protected void setTokenizer() {
tokenFactory = new UTF8NGramTokenFactory();
tokenizer = new NGramUTF8StringBinaryTokenizer(3, false, true, false,
tokenFactory);
- searcher = new TOccurrenceSearcher(taskCtx, invIndex, tokenizer);
- resultCursor = new SearchResultCursor(
- searcher.createResultFrameTupleAccessor(),
- searcher.createResultTupleReference());
+ }
+
+ @Before
+ public void start() throws Exception {
+ super.start();
+ btreeBinCmps = new IBinaryComparator[btreeCmpFactories.length];
+ for (int i = 0; i < btreeCmpFactories.length; i++) {
+ btreeBinCmps[i] = btreeCmpFactories[i].createBinaryComparator();
+ }
generateDataStrings();
loadData();
}
@@ -130,7 +134,7 @@
}
}
- public void loadData() throws IOException, TreeIndexException, PageAllocationException {
+ public void loadData() throws IOException, TreeIndexException {
List<TokenIdPair> pairs = new ArrayList<TokenIdPair>();
// generate pairs for subsequent sorting and bulk-loading
int id = 0;
@@ -139,38 +143,28 @@
DataOutputStream dos = new DataOutputStream(baaos);
UTF8StringSerializerDeserializer.INSTANCE.serialize(s, dos);
tokenizer.reset(baaos.getByteArray(), 0, baaos.size());
- int tokenCount = 0;
while (tokenizer.hasNext()) {
tokenizer.next();
IToken token = tokenizer.getToken();
pairs.add(new TokenIdPair(token, id));
- ++tokenCount;
}
++id;
}
Collections.sort(pairs);
// bulk load index
- IInvertedListBuilder invListBuilder = new FixedSizeElementInvertedListBuilder(
- invListTypeTraits);
- InvertedIndex.BulkLoadContext ctx = invIndex.beginBulkLoad(
- invListBuilder, HYRACKS_FRAME_SIZE, BTree.DEFAULT_FILL_FACTOR);
+ IIndexBulkLoadContext ctx = invIndex.beginBulkLoad(BTree.DEFAULT_FILL_FACTOR);
for (TokenIdPair t : pairs) {
tb.reset();
tb.addField(t.baaos.getByteArray(), 0,
t.baaos.getByteArray().length);
- IntegerSerializerDeserializer.INSTANCE.serialize(t.id, dos);
+ IntegerSerializerDeserializer.INSTANCE.serialize(t.id, tb.getDataOutput());
tb.addFieldEndOffset();
-
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
- tb.getSize());
-
- tuple.reset(accessor, 0);
+ tuple.reset(tb.getFieldEndOffsets(), tb.getByteArray());
try {
- invIndex.bulkLoadAddTuple(ctx, tuple);
+ invIndex.bulkLoadAddTuple(tuple, ctx);
} catch (Exception e) {
e.printStackTrace();
}
@@ -188,28 +182,34 @@
rnd.setSeed(50);
+ InvertedIndexAccessor accessor = (InvertedIndexAccessor) invIndex.createAccessor();
+ InvertedIndexSearchPredicate searchPred = new InvertedIndexSearchPredicate(tokenizer, searchModifier);
+
for (int i = 0; i < numQueries; i++) {
int queryIndex = Math.abs(rnd.nextInt() % dataStrings.size());
String queryString = dataStrings.get(queryIndex);
+ // Serialize query.
queryTb.reset();
UTF8StringSerializerDeserializer.INSTANCE.serialize(queryString,
- queryDos);
+ queryTb.getDataOutput());
queryTb.addFieldEndOffset();
+ queryTuple.reset(queryTb.getFieldEndOffsets(), queryTb.getByteArray());
- queryAppender.reset(frame, true);
- queryAppender.append(queryTb.getFieldEndOffsets(),
- queryTb.getByteArray(), 0, queryTb.getSize());
- queryTuple.reset(queryAccessor, 0);
-
+ // Set query tuple in search predicate.
+ searchPred.setQueryTuple(queryTuple);
+ searchPred.setQueryFieldIndex(0);
+
+ resultCursor = accessor.createSearchCursor();
+
int repeats = 1;
double totalTime = 0;
for (int j = 0; j < repeats; j++) {
long timeStart = System.currentTimeMillis();
try {
- searcher.reset();
- searcher.search(resultCursor, queryTuple, 0, searchModifier);
+ resultCursor.reset();
+ accessor.search(resultCursor, searchPred);
} catch (OccurrenceThresholdPanicException e) {
// ignore panic queries
}
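The loadData() rewrite above keeps the same preparation recipe: tokenize every data string, pair each token with the string's id, sort the pairs, and feed them to the bulk loader in that order, since bulk loading requires sorted input. A deliberately simplified, Hyracks-free model of that step, with whitespace splitting standing in for the real NGram/word tokenizers:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

public class TokenIdPairSketch {
    // Minimal stand-in for the test's TokenIdPair: ordered by token, then by id.
    static class Pair implements Comparable<Pair> {
        final String token;
        final int id;

        Pair(String token, int id) {
            this.token = token;
            this.id = id;
        }

        @Override
        public int compareTo(Pair o) {
            int cmp = token.compareTo(o.token);
            if (cmp != 0) {
                return cmp;
            }
            return id - o.id; // ids are small and non-negative here, so subtraction is safe
        }

        @Override
        public String toString() {
            return token + ":" + id;
        }
    }

    public static void main(String[] args) {
        List<String> dataStrings = Arrays.asList("john smith", "jane smith", "john doe");
        List<Pair> pairs = new ArrayList<Pair>();
        int id = 0;
        for (String s : dataStrings) {
            for (String token : s.split(" ")) { // stand-in for the real tokenizer
                pairs.add(new Pair(token, id));
            }
            id++;
        }
        Collections.sort(pairs); // bulk load needs sorted (token, id) input
        System.out.println(pairs); // [doe:2, jane:1, john:0, john:2, smith:0, smith:1]
    }
}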
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
index 5800867..e071098 100644
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/pom.xml
@@ -33,12 +33,6 @@
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-control-nc</artifactId>
- <version>0.2.1-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-am-rtree</artifactId>
<version>0.2.1-SNAPSHOT</version>
<type>jar</type>
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
deleted file mode 100644
index 8f619a4..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
+++ /dev/null
@@ -1,40 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.logging.Logger;
-
-import org.junit.AfterClass;
-
-public abstract class AbstractRTreeTest {
-
- protected static final Logger LOGGER = Logger.getLogger(AbstractRTreeTest.class.getName());
- protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat(
- "ddMMyy-hhmmssSS");
- protected final static String tmpDir = System.getProperty("java.io.tmpdir");
- protected final static String sep = System.getProperty("file.separator");
- protected final static String fileName = tmpDir + sep
- + simpleDateFormat.format(new Date());
-
- @AfterClass
- public static void cleanup() throws Exception {
- File f = new File(fileName);
- f.deleteOnExit();
- }
-}
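The deleted base class amounts to one idea: derive a per-run index file name from a timestamp under java.io.tmpdir and let the JVM remove it on exit (the new RTreeTestHarness presumably provides the equivalent). A minimal standalone sketch of that pattern:

import java.io.File;
import java.text.SimpleDateFormat;
import java.util.Date;

public class TempFileNameSketch {
    public static void main(String[] args) {
        String tmpDir = System.getProperty("java.io.tmpdir");
        String sep = System.getProperty("file.separator");
        // Same timestamp pattern as the deleted AbstractRTreeTest.
        String fileName = tmpDir + sep + new SimpleDateFormat("ddMMyy-hhmmssSS").format(new Date());
        File f = new File(fileName);
        f.deleteOnExit(); // removed automatically when the JVM shuts down
        System.out.println("index file for this run: " + f.getAbsolutePath());
    }
}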
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeBulkLoadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeBulkLoadTest.java
new file mode 100644
index 0000000..58bca10
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeBulkLoadTest.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeBulkLoadTest;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
+
+@SuppressWarnings("rawtypes")
+public class RTreeBulkLoadTest extends AbstractRTreeBulkLoadTest {
+
+ public RTreeBulkLoadTest() {
+ super(1);
+ }
+
+ private final RTreeTestHarness harness = new RTreeTestHarness();
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @After
+ public void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
+ IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys) throws Exception {
+ return RTreeTestContext.create(harness.getBufferCache(), harness.getTreeFileId(), fieldSerdes,
+ valueProviderFactories, numKeys);
+ }
+
+ @Override
+ protected Random getRandom() {
+ return harness.getRandom();
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeDeleteTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeDeleteTest.java
new file mode 100644
index 0000000..42e933e
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeDeleteTest.java
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeDeleteTest;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
+
+@SuppressWarnings("rawtypes")
+public class RTreeDeleteTest extends AbstractRTreeDeleteTest {
+
+ private final RTreeTestHarness harness = new RTreeTestHarness();
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @After
+ public void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
+ IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys) throws Exception {
+ return RTreeTestContext.create(harness.getBufferCache(), harness.getTreeFileId(), fieldSerdes,
+ valueProviderFactories, numKeys);
+ }
+
+ @Override
+ protected Random getRandom() {
+ return harness.getRandom();
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeExamplesTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeExamplesTest.java
new file mode 100644
index 0000000..c72338e
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeExamplesTest.java
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeExamplesTest;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
+
+public class RTreeExamplesTest extends AbstractRTreeExamplesTest {
+ private final RTreeTestHarness harness = new RTreeTestHarness();
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @After
+ public void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
+ IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories)
+ throws TreeIndexException {
+ return RTreeUtils.createRTree(harness.getBufferCache(), typeTraits,
+ valueProviderFactories, rtreeCmpFactories);
+ }
+
+ @Override
+ protected int getIndexFileId() {
+ return harness.getTreeFileId();
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeInsertTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeInsertTest.java
new file mode 100644
index 0000000..6efa620
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeInsertTest.java
@@ -0,0 +1,67 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.Random;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeInsertTest;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
+
+/**
+ * Tests the RTree insert operation using various numbers of key (dimension)
+ * and payload fields.
+ *
+ * Each test first fills an RTree with randomly generated tuples and then
+ * verifies the index contents (e.g., via range search and disk-order scan)
+ * against the expected results.
+ */
+@SuppressWarnings("rawtypes")
+public class RTreeInsertTest extends AbstractRTreeInsertTest {
+
+ private final RTreeTestHarness harness = new RTreeTestHarness();
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @After
+ public void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected AbstractRTreeTestContext createTestContext(ISerializerDeserializer[] fieldSerdes,
+ IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys) throws Exception {
+ return RTreeTestContext.create(harness.getBufferCache(), harness.getTreeFileId(), fieldSerdes,
+ valueProviderFactories, numKeys);
+ }
+
+ @Override
+ protected Random getRandom() {
+ return harness.getRandom();
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeSearchCursorTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
new file mode 100644
index 0000000..8332c11
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeSearchCursorTest.java
@@ -0,0 +1,175 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree;
+
+import java.util.ArrayList;
+import java.util.Random;
+import java.util.logging.Level;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.RTreeCheckTuple;
+import edu.uci.ics.hyracks.storage.am.rtree.RTreeTestUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.AbstractRTreeTest;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+public class RTreeSearchCursorTest extends AbstractRTreeTest {
+
+ private final RTreeTestUtils rTreeTestUtils;
+ private Random rnd = new Random(50);
+
+ public RTreeSearchCursorTest() {
+ this.rTreeTestUtils = new RTreeTestUtils();
+ }
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ super.setUp();
+ }
+
+ @SuppressWarnings({ "unchecked", "rawtypes" })
+ @Test
+ public void rangeSearchTest() throws Exception {
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("TESTING RANGE SEARCH CURSOR FOR RTREE");
+ }
+
+ IBufferCache bufferCache = harness.getBufferCache();
+ int rtreeFileId = harness.getTreeFileId();
+
+ // Declare fields.
+ int fieldCount = 5;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[3] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[4] = IntegerPointable.TYPE_TRAITS;
+ // Declare field serdes.
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+ // Declare keys.
+ int keyFieldCount = 4;
+ IBinaryComparatorFactory[] cmpFactories = new IBinaryComparatorFactory[keyFieldCount];
+ cmpFactories[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ cmpFactories[1] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ cmpFactories[2] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+ cmpFactories[3] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY);
+
+ // create value providers
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ cmpFactories.length, IntegerPointable.FACTORY);
+
+ RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
+ valueProviderFactories);
+ ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
+ valueProviderFactories);
+
+ IRTreeInteriorFrame interiorFrame = (IRTreeInteriorFrame) interiorFrameFactory.createFrame();
+ IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, 0, metaFrameFactory);
+
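+ // Assemble the R-tree from the frame factories and free page manager, then create and open it on the harness-provided file.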
+ RTree rtree = new RTree(bufferCache, fieldCount, cmpFactories, freePageManager, interiorFrameFactory,
+ leafFrameFactory);
+ rtree.create(rtreeFileId);
+ rtree.open(rtreeFileId);
+
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ ArrayTupleReference tuple = new ArrayTupleReference();
+ ITreeIndexAccessor indexAccessor = rtree.createAccessor();
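+ // Insert randomly generated rectangles (plus an integer payload) and remember them as check tuples.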
+ int numInserts = 10000;
+ ArrayList<RTreeCheckTuple> checkTuples = new ArrayList<RTreeCheckTuple>();
+ for (int i = 0; i < numInserts; i++) {
+ int p1x = rnd.nextInt();
+ int p1y = rnd.nextInt();
+ int p2x = rnd.nextInt();
+ int p2y = rnd.nextInt();
+
+ int pk = rnd.nextInt();
+
+ TupleUtils.createIntegerTuple(tb, tuple, Math.min(p1x, p2x), Math.min(p1y, p2y), Math.max(p1x, p2x),
+ Math.max(p1y, p2y), pk);
+ try {
+ indexAccessor.insert(tuple);
+ } catch (TreeIndexException e) {
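+ // Ignore tuples that could not be inserted.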
+ }
+ RTreeCheckTuple checkTuple = new RTreeCheckTuple(fieldCount, keyFieldCount);
+ checkTuple.add(Math.min(p1x, p2x));
+ checkTuple.add(Math.min(p1y, p2y));
+ checkTuple.add(Math.max(p1x, p2x));
+ checkTuple.add(Math.max(p1y, p2y));
+ checkTuple.add(pk);
+
+ checkTuples.add(checkTuple);
+ }
+
+ // Build the search key: a query rectangle from (-1000, -1000) to (1000, 1000).
+ ArrayTupleBuilder keyTb = new ArrayTupleBuilder(keyFieldCount);
+ ArrayTupleReference key = new ArrayTupleReference();
+ TupleUtils.createIntegerTuple(keyTb, key, -1000, -1000, 1000, 1000);
+
+ MultiComparator cmp = MultiComparator.create(cmpFactories);
+ ITreeIndexCursor searchCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
+ SearchPredicate searchPredicate = new SearchPredicate(key, cmp);
+
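+ // Compute the expected answer from the in-memory check tuples and compare it with the cursor's output.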
+ RTreeCheckTuple keyCheck = (RTreeCheckTuple) rTreeTestUtils.createCheckTupleFromTuple(key, fieldSerdes,
+ keyFieldCount);
+ ArrayList<RTreeCheckTuple> expectedResult = rTreeTestUtils.getRangeSearchExpectedResults(checkTuples, keyCheck);
+
+ indexAccessor.search(searchCursor, searchPredicate);
+
+ rTreeTestUtils.checkExpectedResults(searchCursor, expectedResult, fieldSerdes, keyFieldCount, null);
+
+ rtree.close();
+ }
+
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
deleted file mode 100644
index 9fe2c94..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
+++ /dev/null
@@ -1,769 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import java.io.DataOutput;
-import java.io.File;
-import java.nio.ByteBuffer;
-import java.util.Random;
-import java.util.logging.Level;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStats;
-import edu.uci.ics.hyracks.storage.am.common.util.TreeIndexStatsGatherer;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class RTreeTest extends AbstractRTreeTest {
- private static final int PAGE_SIZE = 256;
- private static final int NUM_PAGES = 10;
- private static final int MAX_OPEN_FILES = 10;
- private static final int HYRACKS_FRAME_SIZE = 128;
- private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-
- // create an R-tree of two dimensions
- // fill the R-tree with random values using insertions
- // perform ordered scan
- @Test
- public void test01() throws Exception {
-
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
-
- // declare keys
- int keyFieldCount = 4;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
- cmps[1] = cmps[0];
- cmps[2] = cmps[0];
- cmps[3] = cmps[0];
-
- // declare tuple fields
- int fieldCount = 7;
- ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
- typeTraits[0] = DoublePointable.TYPE_TRAITS;
- typeTraits[1] = DoublePointable.TYPE_TRAITS;
- typeTraits[2] = DoublePointable.TYPE_TRAITS;
- typeTraits[3] = DoublePointable.TYPE_TRAITS;
- typeTraits[4] = DoublePointable.TYPE_TRAITS;
- typeTraits[5] = DoublePointable.TYPE_TRAITS;
- typeTraits[6] = DoublePointable.TYPE_TRAITS;
-
- // create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- cmps.length, DoublePointable.FACTORY);
-
- MultiComparator cmp = new MultiComparator(cmps);
-
- RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
-
- ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
- valueProviderFactories);
- ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
- valueProviderFactories);
- ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
- IRTreeFrame interiorFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
- IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
- RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
- rtree.create(fileId);
- rtree.open(fileId);
-
- ByteBuffer hyracksFrame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- DataOutput dos = tb.getDataOutput();
-
- @SuppressWarnings("rawtypes")
- ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
- DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
- DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
- accessor.reset(hyracksFrame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- ITreeIndexAccessor indexAccessor = rtree.createAccessor();
-
- Random rnd = new Random();
- rnd.setSeed(50);
-
- Random rnd2 = new Random();
- rnd2.setSeed(50);
- for (int i = 0; i < 5000; i++) {
-
- double p1x = rnd.nextDouble();
- double p1y = rnd.nextDouble();
- double p2x = rnd.nextDouble();
- double p2y = rnd.nextDouble();
-
- double pk1 = rnd2.nextDouble();
- int pk2 = rnd2.nextInt();
- double pk3 = rnd2.nextDouble();
-
- tb.reset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
- tb.addFieldEndOffset();
-
- appender.reset(hyracksFrame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
- + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
- }
- }
-
- try {
- indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- String rtreeStats = rtree.printStats();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(rtreeStats);
- }
-
- // disk-order scan
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("DISK-ORDER SCAN:");
- }
- TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
- indexAccessor.diskOrderScan(diskOrderCursor);
- try {
- while (diskOrderCursor.hasNext()) {
- diskOrderCursor.next();
- ITupleReference frameTuple = diskOrderCursor.getTuple();
- String rec = TupleUtils.printTuple(frameTuple, recDescSers);
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(rec);
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- diskOrderCursor.close();
- }
-
- TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
- rtree.getRootPageId());
- TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
- String string = stats.toString();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(string);
- }
-
- rtree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
-
- }
-
- // create an R-tree of two dimensions
- // fill the R-tree with random values using insertions
- // and then delete all the tuples, which results in an empty R-tree
- @Test
- public void test02() throws Exception {
-
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
-
- // declare keys
- int keyFieldCount = 4;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
- cmps[1] = cmps[0];
- cmps[2] = cmps[0];
- cmps[3] = cmps[0];
-
- // declare tuple fields
- int fieldCount = 7;
- ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
- typeTraits[0] = DoublePointable.TYPE_TRAITS;
- typeTraits[1] = DoublePointable.TYPE_TRAITS;
- typeTraits[2] = DoublePointable.TYPE_TRAITS;
- typeTraits[3] = DoublePointable.TYPE_TRAITS;
- typeTraits[4] = DoublePointable.TYPE_TRAITS;
- typeTraits[5] = DoublePointable.TYPE_TRAITS;
- typeTraits[6] = DoublePointable.TYPE_TRAITS;
-
- // create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- cmps.length, DoublePointable.FACTORY);
-
- MultiComparator cmp = new MultiComparator(cmps);
-
- RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
-
- ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
- valueProviderFactories);
- ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
- valueProviderFactories);
- ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
- IRTreeFrame interiorFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
- IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
- RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
- rtree.create(fileId);
- rtree.open(fileId);
-
- ByteBuffer hyracksFrame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- DataOutput dos = tb.getDataOutput();
-
- @SuppressWarnings("rawtypes")
- ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
- DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
- DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
- accessor.reset(hyracksFrame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- ITreeIndexAccessor indexAccessor = rtree.createAccessor();
-
- Random rnd = new Random();
- rnd.setSeed(50);
-
- for (int i = 0; i < 5000; i++) {
-
- double p1x = rnd.nextDouble();
- double p1y = rnd.nextDouble();
- double p2x = rnd.nextDouble();
- double p2y = rnd.nextDouble();
-
- double pk1 = rnd.nextDouble();
- int pk2 = rnd.nextInt();
- double pk3 = rnd.nextDouble();
-
- tb.reset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
- tb.addFieldEndOffset();
-
- appender.reset(hyracksFrame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
- + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
- }
- }
-
- try {
- indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- String rtreeStats = rtree.printStats();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(rtreeStats);
- }
-
- rnd.setSeed(50);
- for (int i = 0; i < 5000; i++) {
-
- double p1x = rnd.nextDouble();
- double p1y = rnd.nextDouble();
- double p2x = rnd.nextDouble();
- double p2y = rnd.nextDouble();
-
- double pk1 = rnd.nextDouble();
- int pk2 = rnd.nextInt();
- double pk3 = rnd.nextDouble();
-
- tb.reset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
- tb.addFieldEndOffset();
-
- appender.reset(hyracksFrame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("DELETING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
- + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
- }
- }
-
- try {
- indexAccessor.delete(tuple);
- } catch (TreeIndexException e) {
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
- rtree.getRootPageId());
- TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
- String string = stats.toString();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(string);
- }
-
- rtree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
-
- }
-
- // create an R-tree of three dimensions
- // fill the R-tree with random values using insertions
- // perform ordered scan
- @Test
- public void test03() throws Exception {
-
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
-
- // declare keys
- int keyFieldCount = 6;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
- cmps[1] = cmps[0];
- cmps[2] = cmps[0];
- cmps[3] = cmps[0];
- cmps[4] = cmps[0];
- cmps[5] = cmps[0];
-
- // declare tuple fields
- int fieldCount = 9;
- ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
- typeTraits[0] = DoublePointable.TYPE_TRAITS;
- typeTraits[1] = DoublePointable.TYPE_TRAITS;
- typeTraits[2] = DoublePointable.TYPE_TRAITS;
- typeTraits[3] = DoublePointable.TYPE_TRAITS;
- typeTraits[4] = DoublePointable.TYPE_TRAITS;
- typeTraits[5] = DoublePointable.TYPE_TRAITS;
- typeTraits[6] = DoublePointable.TYPE_TRAITS;
- typeTraits[7] = IntegerPointable.TYPE_TRAITS;
- typeTraits[8] = DoublePointable.TYPE_TRAITS;
-
- // create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- cmps.length, DoublePointable.FACTORY);
-
- MultiComparator cmp = new MultiComparator(cmps);
-
- RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
-
- ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
- valueProviderFactories);
- ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
- valueProviderFactories);
- ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
- IRTreeFrame interiorFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
- IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
- RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
- rtree.create(fileId);
- rtree.open(fileId);
-
- ByteBuffer hyracksFrame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- DataOutput dos = tb.getDataOutput();
-
- @SuppressWarnings("rawtypes")
- ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
- DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
- DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
- DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
- accessor.reset(hyracksFrame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- ITreeIndexAccessor indexAccessor = rtree.createAccessor();
-
- Random rnd = new Random();
- rnd.setSeed(50);
-
- for (int i = 0; i < 5000; i++) {
-
- double p1x = rnd.nextDouble();
- double p1y = rnd.nextDouble();
- double p1z = rnd.nextDouble();
- double p2x = rnd.nextDouble();
- double p2y = rnd.nextDouble();
- double p2z = rnd.nextDouble();
-
- double pk1 = rnd.nextDouble();
- int pk2 = rnd.nextInt();
- double pk3 = rnd.nextDouble();
-
- tb.reset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1z, p2z), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1z, p2z), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
- tb.addFieldEndOffset();
-
- appender.reset(hyracksFrame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
- + Math.min(p1z, p2z) + " " + " " + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + " "
- + Math.max(p1z, p2z));
- }
- }
-
- try {
- indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- String rtreeStats = rtree.printStats();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(rtreeStats);
- }
-
- // disk-order scan
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("DISK-ORDER SCAN:");
- }
- TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
- indexAccessor.diskOrderScan(diskOrderCursor);
- try {
- while (diskOrderCursor.hasNext()) {
- diskOrderCursor.next();
- ITupleReference frameTuple = diskOrderCursor.getTuple();
- String rec = TupleUtils.printTuple(frameTuple, recDescSers);
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(rec);
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- diskOrderCursor.close();
- }
-
- TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
- rtree.getRootPageId());
- TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
- String string = stats.toString();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(string);
- }
-
- rtree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
-
- }
-
- // create an R-tree of two dimensions
- // fill the R-tree with random integer key values using insertions
- // perform ordered scan
- @Test
- public void test04() throws Exception {
-
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
-
- // declare keys
- int keyFieldCount = 4;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
- cmps[1] = cmps[0];
- cmps[2] = cmps[0];
- cmps[3] = cmps[0];
-
- // declare tuple fields
- int fieldCount = 7;
- ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
- typeTraits[0] = IntegerPointable.TYPE_TRAITS;
- typeTraits[1] = IntegerPointable.TYPE_TRAITS;
- typeTraits[2] = IntegerPointable.TYPE_TRAITS;
- typeTraits[3] = IntegerPointable.TYPE_TRAITS;
- typeTraits[4] = DoublePointable.TYPE_TRAITS;
- typeTraits[5] = IntegerPointable.TYPE_TRAITS;
- typeTraits[6] = DoublePointable.TYPE_TRAITS;
-
- // create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- cmps.length, IntegerPointable.FACTORY);
-
- MultiComparator cmp = new MultiComparator(cmps);
-
- RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
-
- ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
- valueProviderFactories);
- ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
- valueProviderFactories);
- ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
- IRTreeFrame interiorFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
- IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
- RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
- rtree.create(fileId);
- rtree.open(fileId);
-
- ByteBuffer hyracksFrame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- DataOutput dos = tb.getDataOutput();
-
- @SuppressWarnings("rawtypes")
- ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
- accessor.reset(hyracksFrame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- ITreeIndexAccessor indexAccessor = rtree.createAccessor();
-
- Random rnd = new Random();
- rnd.setSeed(50);
-
- Random rnd2 = new Random();
- rnd2.setSeed(50);
- for (int i = 0; i < 5000; i++) {
-
- int p1x = rnd.nextInt();
- int p1y = rnd.nextInt();
- int p2x = rnd.nextInt();
- int p2y = rnd.nextInt();
-
- double pk1 = rnd2.nextDouble();
- int pk2 = rnd2.nextInt();
- double pk3 = rnd2.nextDouble();
-
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
- tb.addFieldEndOffset();
-
- appender.reset(hyracksFrame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
- + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
- }
- }
-
- try {
- indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- String rtreeStats = rtree.printStats();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(rtreeStats);
- }
-
- // disk-order scan
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("DISK-ORDER SCAN:");
- }
- TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
- indexAccessor.diskOrderScan(diskOrderCursor);
- try {
- while (diskOrderCursor.hasNext()) {
- diskOrderCursor.next();
- ITupleReference frameTuple = diskOrderCursor.getTuple();
- String rec = TupleUtils.printTuple(frameTuple, recDescSers);
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(rec);
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- diskOrderCursor.close();
- }
-
- TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
- rtree.getRootPageId());
- TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
- String string = stats.toString();
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(string);
- }
-
- rtree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
-
- }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
deleted file mode 100644
index a232e37..0000000
--- a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
+++ /dev/null
@@ -1,252 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree;
-
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
-import java.io.DataOutput;
-import java.io.File;
-import java.nio.ByteBuffer;
-import java.util.ArrayList;
-import java.util.Random;
-import java.util.logging.Level;
-
-import org.junit.Test;
-
-import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
-import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
-import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
-import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
-import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public class SearchCursorTest extends AbstractRTreeTest {
- private static final int PAGE_SIZE = 256;
- private static final int NUM_PAGES = 10;
- private static final int MAX_OPEN_FILES = 10;
- private static final int HYRACKS_FRAME_SIZE = 128;
- private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-
- // create an R-tree of two dimensions
- // fill the R-tree with random values using insertions
- // and then perform range search
- @Test
- public void searchCursorTest() throws Exception {
-
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
-
- // declare keys
- int keyFieldCount = 4;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
- cmps[1] = cmps[0];
- cmps[2] = cmps[0];
- cmps[3] = cmps[0];
-
- // declare tuple fields
- int fieldCount = 5;
- ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
- typeTraits[0] = DoublePointable.TYPE_TRAITS;
- typeTraits[1] = DoublePointable.TYPE_TRAITS;
- typeTraits[2] = DoublePointable.TYPE_TRAITS;
- typeTraits[3] = DoublePointable.TYPE_TRAITS;
- typeTraits[4] = IntegerPointable.TYPE_TRAITS;
-
- // create value providers
- IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
- cmps.length, DoublePointable.FACTORY);
-
- MultiComparator cmp = new MultiComparator(cmps);
-
- RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
-
- ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
- valueProviderFactories);
- ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory,
- valueProviderFactories);
- ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
-
- IRTreeInteriorFrame interiorFrame = (IRTreeInteriorFrame) interiorFrameFactory.createFrame();
- IRTreeLeafFrame leafFrame = (IRTreeLeafFrame) leafFrameFactory.createFrame();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
- RTree rtree = new RTree(bufferCache, fieldCount, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
- rtree.create(fileId);
- rtree.open(fileId);
-
- ByteBuffer hyracksFrame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- DataOutput dos = tb.getDataOutput();
-
- @SuppressWarnings("rawtypes")
- ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
- DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
- DoubleSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
- accessor.reset(hyracksFrame);
- FrameTupleReference tuple = new FrameTupleReference();
-
- ITreeIndexAccessor indexAccessor = rtree.createAccessor();
-
- Random rnd = new Random();
- rnd.setSeed(50);
- for (int i = 0; i < 5000; i++) {
-
- double p1x = rnd.nextDouble();
- double p1y = rnd.nextDouble();
- double p2x = rnd.nextDouble();
- double p2y = rnd.nextDouble();
-
- int pk = rnd.nextInt();
-
- tb.reset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(pk, dos);
- tb.addFieldEndOffset();
-
- appender.reset(hyracksFrame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- if (LOGGER.isLoggable(Level.INFO)) {
- if (i % 1000 == 0) {
- LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
- + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
- }
- }
-
- try {
- indexAccessor.insert(tuple);
- } catch (TreeIndexException e) {
- } catch (Exception e) {
- e.printStackTrace();
- }
- }
-
- for (int i = 0; i < 50; i++) {
- double p1x = rnd.nextDouble();
- double p1y = rnd.nextDouble();
- double p2x = rnd.nextDouble();
- double p2y = rnd.nextDouble();
-
- int pk = rnd.nextInt();
-
- tb.reset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
- tb.addFieldEndOffset();
- DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(pk, dos);
- tb.addFieldEndOffset();
-
- appender.reset(hyracksFrame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
- tuple.reset(accessor, 0);
-
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info(i + " Searching for: " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
- + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
- }
-
- ITreeIndexCursor searchCursor = new RTreeSearchCursor(interiorFrame, leafFrame);
- SearchPredicate searchPredicate = new SearchPredicate(tuple, cmp);
-
- indexAccessor.search(searchCursor, searchPredicate);
-
- ArrayList<Integer> results = new ArrayList<Integer>();
- try {
- while (searchCursor.hasNext()) {
- searchCursor.next();
- ITupleReference frameTuple = searchCursor.getTuple();
- ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(4),
- frameTuple.getFieldStart(4), frameTuple.getFieldLength(4));
- DataInput dataIn = new DataInputStream(inStream);
- Integer res = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
- results.add(res);
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- searchCursor.close();
- }
- if (LOGGER.isLoggable(Level.INFO)) {
- LOGGER.info("There are " + results.size() + " objects that satisfy the query");
- }
- }
-
- rtree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
- }
-}
\ No newline at end of file
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
new file mode 100644
index 0000000..7520793
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.multithread;
+
+import java.util.ArrayList;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestWorkloadConf;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeMultiThreadTest;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
+
+public class RTreeMultiThreadTest extends AbstractRTreeMultiThreadTest {
+
+ private RTreeTestHarness harness = new RTreeTestHarness();
+
+ private RTreeTestWorkerFactory workerFactory = new RTreeTestWorkerFactory();
+
+ @Override
+ protected void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @Override
+ protected void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
+ IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories)
+ throws TreeIndexException {
+ return RTreeUtils.createRTree(harness.getBufferCache(), typeTraits,
+ valueProviderFactories, rtreeCmpFactories);
+ }
+
+ @Override
+ protected ITreeIndexTestWorkerFactory getWorkerFactory() {
+ return workerFactory;
+ }
+
+ @Override
+ protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
+ ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
+
+ // Insert only workload.
+ TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
+ workloadConfs.add(new TestWorkloadConf(insertOnlyOps, getUniformOpProbs(insertOnlyOps)));
+
+ // Inserts mixed with scans.
+ TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.SCAN,
+ TestOperation.DISKORDER_SCAN };
+ workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, getUniformOpProbs(insertSearchOnlyOps)));
+
+ // Inserts and deletes.
+ TestOperation[] insertDeleteOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE };
+ workloadConfs.add(new TestWorkloadConf(insertDeleteOps, getUniformOpProbs(insertDeleteOps)));
+
+ // All operations mixed.
+ TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN,
+ TestOperation.DISKORDER_SCAN };
+ workloadConfs.add(new TestWorkloadConf(allOps, getUniformOpProbs(allOps)));
+
+ return workloadConfs;
+ }
+
+ @Override
+ protected int getFileId() {
+ return harness.getTreeFileId();
+ }
+
+ @Override
+ protected String getIndexTypeName() {
+ return "RTree";
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
new file mode 100644
index 0000000..f5867e6
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
@@ -0,0 +1,115 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.multithread;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexException;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+
+public class RTreeTestWorker extends AbstractTreeIndexTestWorker {
+
+ private final RTree rtree;
+ private final int numFields;
+ private final ArrayTupleReference rearrangedTuple = new ArrayTupleReference();
+ private final ArrayTupleBuilder rearrangedTb;
+
+ public RTreeTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, ITreeIndex index, int numBatches) {
+ super(dataGen, opSelector, index, numBatches);
+ rtree = (RTree) index;
+ numFields = rtree.getFieldCount();
+ rearrangedTb = new ArrayTupleBuilder(numFields);
+ }
+
+ @Override
+ public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, IndexException {
+ RTree.RTreeAccessor accessor = (RTree.RTreeAccessor) indexAccessor;
+ IIndexCursor searchCursor = accessor.createSearchCursor();
+ ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
+ MultiComparator cmp = accessor.getOpContext().cmp;
+ SearchPredicate rangePred = new SearchPredicate(tuple, cmp);
+
+ switch (op) {
+ case INSERT:
+ rearrangeTuple(tuple, cmp);
+ accessor.insert(rearrangedTuple);
+ break;
+
+ case DELETE:
+ rearrangeTuple(tuple, cmp);
+ accessor.delete(rearrangedTuple);
+ break;
+
+ case SCAN:
+ searchCursor.reset();
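+ // A null search key makes the range predicate match every entry, i.e. a full scan.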
+ rangePred.setSearchKey(null);
+ accessor.search(searchCursor, rangePred);
+ consumeCursorTuples(searchCursor);
+ break;
+
+ case DISKORDER_SCAN:
+ diskOrderScanCursor.reset();
+ accessor.diskOrderScan(diskOrderScanCursor);
+ consumeCursorTuples(diskOrderScanCursor);
+ break;
+
+ default:
+ throw new HyracksDataException("Op " + op.toString() + " not supported.");
+ }
+ }
+
+ private void rearrangeTuple(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
+ // Create a tuple with rearranged key values to make sure the low point's
+ // coordinates do not exceed the high point's coordinates.
+ rearrangedTb.reset();
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
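+ // First half of the key: keep the smaller coordinate of each (low, high) pair.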
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ if (c > 0) {
+ rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ } else {
+ rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+ }
+ }
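+ // Second half of the key: keep the larger coordinate of each pair.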
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ if (c > 0) {
+ rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+ } else {
+ rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ }
+ }
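+ // Copy any remaining non-key fields through unchanged.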
+ for (int i = cmp.getKeyFieldCount(); i < numFields; i++) {
+ rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+ }
+ rearrangedTuple.reset(rearrangedTb.getFieldEndOffsets(), rearrangedTb.getByteArray());
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java
new file mode 100644
index 0000000..d4f14ca
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.multithread;
+
+import edu.uci.ics.hyracks.storage.am.common.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.TestOperationSelector;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+
+public class RTreeTestWorkerFactory implements ITreeIndexTestWorkerFactory {
+ @Override
+ public AbstractTreeIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
+ ITreeIndex index, int numBatches) {
+ return new RTreeTestWorker(dataGen, opSelector, index, numBatches);
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
new file mode 100644
index 0000000..fccb29c
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/AbstractRTreeTest.java
@@ -0,0 +1,46 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.utils;
+
+import java.util.logging.Logger;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+
+public abstract class AbstractRTreeTest {
+ protected final Logger LOGGER = Logger.getLogger(RTreeTestHarness.class.getName());
+ protected final RTreeTestHarness harness;
+
+ public AbstractRTreeTest() {
+ harness = new RTreeTestHarness();
+ }
+
+ public AbstractRTreeTest(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
+ harness = new RTreeTestHarness(pageSize, numPages, maxOpenFiles, hyracksFrameSize);
+ }
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @After
+ public void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestContext.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestContext.java
new file mode 100644
index 0000000..039fb0b
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestContext.java
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.utils;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.rtree.AbstractRTreeTestContext;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+@SuppressWarnings("rawtypes")
+public class RTreeTestContext extends AbstractRTreeTestContext {
+
+ public RTreeTestContext(ISerializerDeserializer[] fieldSerdes, ITreeIndex treeIndex) {
+ super(fieldSerdes, treeIndex);
+ }
+
+ @Override
+ public int getKeyFieldCount() {
+ RTree rtree = (RTree) treeIndex;
+ return rtree.getComparatorFactories().length;
+ }
+
+ @Override
+ public IBinaryComparatorFactory[] getComparatorFactories() {
+ RTree rtree = (RTree) treeIndex;
+ return rtree.getComparatorFactories();
+ }
+
+ public static RTreeTestContext create(IBufferCache bufferCache, int rtreeFileId,
+ ISerializerDeserializer[] fieldSerdes, IPrimitiveValueProviderFactory[] valueProviderFactories,
+ int numKeyFields) throws Exception {
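+ // Derive type traits and comparators from the serdes, then create and open the RTree.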
+ ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
+ IBinaryComparatorFactory[] cmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeyFields);
+ RTree rtree = RTreeUtils.createRTree(bufferCache, typeTraits, valueProviderFactories, cmpFactories);
+ rtree.create(rtreeFileId);
+ rtree.open(rtreeFileId);
+ RTreeTestContext testCtx = new RTreeTestContext(fieldSerdes, rtree);
+ return testCtx;
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
new file mode 100644
index 0000000..c0cec35
--- /dev/null
+++ b/hyracks/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.utils;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Random;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class RTreeTestHarness {
+
+ private static final long RANDOM_SEED = 50;
+ private static final int DEFAULT_PAGE_SIZE = 256;
+ private static final int DEFAULT_NUM_PAGES = 1000;
+ private static final int DEFAULT_MAX_OPEN_FILES = 10;
+ private static final int DEFAULT_HYRACKS_FRAME_SIZE = 128;
+
+ protected final int pageSize;
+ protected final int numPages;
+ protected final int maxOpenFiles;
+ protected final int hyracksFrameSize;
+
+ protected IHyracksTaskContext ctx;
+ protected IBufferCache bufferCache;
+ protected int treeFileId;
+
+ protected final Random rnd = new Random();
+ protected final SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+ protected final String tmpDir = System.getProperty("java.io.tmpdir");
+ protected final String sep = System.getProperty("file.separator");
+ protected String fileName;
+
+ public RTreeTestHarness() {
+ this.pageSize = DEFAULT_PAGE_SIZE;
+ this.numPages = DEFAULT_NUM_PAGES;
+ this.maxOpenFiles = DEFAULT_MAX_OPEN_FILES;
+ this.hyracksFrameSize = DEFAULT_HYRACKS_FRAME_SIZE;
+ }
+
+ public RTreeTestHarness(int pageSize, int numPages, int maxOpenFiles, int hyracksFrameSize) {
+ this.pageSize = pageSize;
+ this.numPages = numPages;
+ this.maxOpenFiles = maxOpenFiles;
+ this.hyracksFrameSize = hyracksFrameSize;
+ }
+
+ public void setUp() throws HyracksDataException {
+ fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+ ctx = TestUtils.create(getHyracksFrameSize());
+ TestStorageManagerComponentHolder.init(pageSize, numPages, maxOpenFiles);
+ bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ treeFileId = fmp.lookupFileId(file);
+ bufferCache.openFile(treeFileId);
+ rnd.setSeed(RANDOM_SEED);
+ }
+
+ public void tearDown() throws HyracksDataException {
+ bufferCache.closeFile(treeFileId);
+ bufferCache.close();
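+ // Schedule the backing index file for deletion when the JVM exits.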
+ File f = new File(fileName);
+ f.deleteOnExit();
+ }
+
+ public IHyracksTaskContext getHyracksTaskContext() {
+ return ctx;
+ }
+
+ public IBufferCache getBufferCache() {
+ return bufferCache;
+ }
+
+ public int getTreeFileId() {
+ return treeFileId;
+ }
+
+ public String getFileName() {
+ return fileName;
+ }
+
+ public Random getRandom() {
+ return rnd;
+ }
+
+ public int getPageSize() {
+ return pageSize;
+ }
+
+ public int getNumPages() {
+ return numPages;
+ }
+
+ public int getHyracksFrameSize() {
+ return hyracksFrameSize;
+ }
+
+ public int getMaxOpenFiles() {
+ return maxOpenFiles;
+ }
+}
diff --git a/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml b/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
index 6c25b9b..fe652c8 100644
--- a/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
+++ b/hyracks/hyracks-tests/hyracks-storage-common-test/pom.xml
@@ -33,12 +33,6 @@
</dependency>
<dependency>
<groupId>edu.uci.ics.hyracks</groupId>
- <artifactId>hyracks-control-nc</artifactId>
- <version>0.2.1-SNAPSHOT</version>
- <scope>compile</scope>
- </dependency>
- <dependency>
- <groupId>edu.uci.ics.hyracks</groupId>
<artifactId>hyracks-storage-common</artifactId>
<version>0.2.1-SNAPSHOT</version>
<scope>compile</scope>
@@ -48,7 +42,7 @@
<artifactId>hyracks-test-support</artifactId>
<version>0.2.1-SNAPSHOT</version>
<type>jar</type>
- <scope>test</scope>
+ <scope>compile</scope>
</dependency>
</dependencies>
</project>
diff --git a/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheRegressionTests.java b/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheRegressionTests.java
index fff9e31..a649aa7 100644
--- a/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheRegressionTests.java
+++ b/hyracks/hyracks-tests/hyracks-storage-common-test/src/test/java/edu/uci/ics/hyracks/storage/common/BufferCacheRegressionTests.java
@@ -10,8 +10,8 @@
import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileHandle;
import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.api.io.IFileHandle;
import edu.uci.ics.hyracks.api.io.IIOManager;
import edu.uci.ics.hyracks.api.io.IIOManager.FileReadWriteMode;
import edu.uci.ics.hyracks.api.io.IIOManager.FileSyncMode;
@@ -23,158 +23,149 @@
import edu.uci.ics.hyracks.test.support.TestUtils;
public class BufferCacheRegressionTests {
- protected static final String tmpDir = System.getProperty("java.io.tmpdir");
- protected static final String sep = System.getProperty("file.separator");
+ protected static final String tmpDir = System.getProperty("java.io.tmpdir");
+ protected static final String sep = System.getProperty("file.separator");
- protected String fileName = tmpDir + sep + "flushTestFile";
+ protected String fileName = tmpDir + sep + "flushTestFile";
- private static final int PAGE_SIZE = 256;
- private static final int HYRACKS_FRAME_SIZE = PAGE_SIZE;
- private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+ private static final int PAGE_SIZE = 256;
+ private static final int HYRACKS_FRAME_SIZE = PAGE_SIZE;
+ private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
- // We want to test the following behavior when reclaiming a file slot in the
- // buffer cache:
- // 1. If the file being evicted was deleted, then its dirty pages should be
- // invalidated, but most not be flushed.
- // 2. If the file was not deleted, then we must flush its dirty pages.
- @Test
- public void testFlushBehaviorOnFileEviction() throws IOException {
- File f = new File(fileName);
- if (f.exists()) {
- f.delete();
- }
- flushBehaviorTest(true);
- flushBehaviorTest(false);
- }
+ // We want to test the following behavior when reclaiming a file slot in the
+ // buffer cache:
+ // 1. If the file being evicted was deleted, then its dirty pages should be
+ // invalidated, but must not be flushed.
+ // 2. If the file was not deleted, then we must flush its dirty pages.
+ @Test
+ public void testFlushBehaviorOnFileEviction() throws IOException {
+ File f = new File(fileName);
+ if (f.exists()) {
+ f.delete();
+ }
+ flushBehaviorTest(true);
+ flushBehaviorTest(false);
+ }
- private void flushBehaviorTest(boolean deleteFile) throws IOException {
- TestStorageManagerComponentHolder.init(PAGE_SIZE, 10, 1);
+ private void flushBehaviorTest(boolean deleteFile) throws IOException {
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, 10, 1);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
- FileReference firstFileRef = new FileReference(new File(fileName));
- bufferCache.createFile(firstFileRef);
- int firstFileId = fmp.lookupFileId(firstFileRef);
- bufferCache.openFile(firstFileId);
+ FileReference firstFileRef = new FileReference(new File(fileName));
+ bufferCache.createFile(firstFileRef);
+ int firstFileId = fmp.lookupFileId(firstFileRef);
+ bufferCache.openFile(firstFileId);
- // Fill the first page with known data and make it dirty by write
- // latching it.
- ICachedPage writePage = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(firstFileId, 0), true);
- writePage.acquireWriteLatch();
- try {
- ByteBuffer buf = writePage.getBuffer();
- for (int i = 0; i < buf.capacity(); i++) {
- buf.put(Byte.MAX_VALUE);
- }
- } finally {
- writePage.releaseWriteLatch();
- bufferCache.unpin(writePage);
- }
- bufferCache.closeFile(firstFileId);
- if (deleteFile) {
- bufferCache.deleteFile(firstFileId, false);
- }
+ // Fill the first page with known data and make it dirty by write
+ // latching it.
+ ICachedPage writePage = bufferCache.pin(BufferedFileHandle.getDiskPageId(firstFileId, 0), true);
+ writePage.acquireWriteLatch();
+ try {
+ ByteBuffer buf = writePage.getBuffer();
+ for (int i = 0; i < buf.capacity(); i++) {
+ buf.put(Byte.MAX_VALUE);
+ }
+ } finally {
+ writePage.releaseWriteLatch();
+ bufferCache.unpin(writePage);
+ }
+ bufferCache.closeFile(firstFileId);
+ if (deleteFile) {
+ bufferCache.deleteFile(firstFileId, false);
+ }
- // Create a file with the same name.
- FileReference secondFileRef = new FileReference(new File(fileName));
- bufferCache.createFile(secondFileRef);
- int secondFileId = fmp.lookupFileId(secondFileRef);
+ // Create a file with the same name.
+ FileReference secondFileRef = new FileReference(new File(fileName));
+ bufferCache.createFile(secondFileRef);
+ int secondFileId = fmp.lookupFileId(secondFileRef);
- // This open will replace the firstFileRef's slot in the BufferCache,
- // causing it's pages to be cleaned up. We want to make sure that those
- // dirty pages are not flushed to the disk, because the file was
- // declared as deleted, and
- // somebody might be already using the same filename again (having been
- // assigned a different fileId).
- bufferCache.openFile(secondFileId);
+ // This open will replace firstFileRef's slot in the BufferCache,
+ // causing its pages to be cleaned up. We want to make sure that those
+ // dirty pages are not flushed to disk, because the file was declared
+ // as deleted, and somebody might already be using the same filename
+ // again (having been assigned a different fileId).
+ bufferCache.openFile(secondFileId);
- // Manually open the file and inspect it's contents. We cannot simply
- // ask the BufferCache to pin the page, because it would return the same
- // physical memory again, and for performance reasons pages are never
- // reset with 0's.
- IIOManager ioManager = ctx.getIOManager();
- FileReference testFileRef = new FileReference(new File(fileName));
- FileHandle testFileHandle = new FileHandle(testFileRef);
- testFileHandle.open(FileReadWriteMode.READ_ONLY,
- FileSyncMode.METADATA_SYNC_DATA_SYNC);
- ByteBuffer testBuffer = ByteBuffer.allocate(PAGE_SIZE);
- ioManager.syncRead(testFileHandle, 0, testBuffer);
- for (int i = 0; i < testBuffer.capacity(); i++) {
- if (deleteFile) {
- // We deleted the file. We expect to see a clean buffer.
- if (testBuffer.get(i) == Byte.MAX_VALUE) {
- fail("Page 0 of deleted file was fazily flushed in openFile(), "
- + "corrupting the data of a newly created file with the same name.");
- }
- } else {
- // We didn't delete the file. We expect to see a buffer full of
- // Byte.MAX_VALUE.
- if (testBuffer.get(i) != Byte.MAX_VALUE) {
- fail("Page 0 of closed file was not flushed when properly, when reclaiming the file slot of fileId 0 in the BufferCache.");
- }
- }
- }
- testFileHandle.close();
- bufferCache.closeFile(secondFileId);
- if (deleteFile) {
- bufferCache.deleteFile(secondFileId, false);
- }
- bufferCache.close();
- }
+ // Manually open the file and inspect its contents. We cannot simply
+ // ask the BufferCache to pin the page, because it would return the same
+ // physical memory again, and for performance reasons pages are never
+ // reset with 0's.
+ IIOManager ioManager = ctx.getIOManager();
+ FileReference testFileRef = new FileReference(new File(fileName));
+ IFileHandle testFileHandle = ioManager.open(testFileRef, FileReadWriteMode.READ_ONLY,
+ FileSyncMode.METADATA_SYNC_DATA_SYNC);
+ ByteBuffer testBuffer = ByteBuffer.allocate(PAGE_SIZE);
+ ioManager.syncRead(testFileHandle, 0, testBuffer);
+ for (int i = 0; i < testBuffer.capacity(); i++) {
+ if (deleteFile) {
+ // We deleted the file. We expect to see a clean buffer.
+ if (testBuffer.get(i) == Byte.MAX_VALUE) {
+ fail("Page 0 of deleted file was lazily flushed in openFile(), "
+ + "corrupting the data of a newly created file with the same name.");
+ }
+ } else {
+ // We didn't delete the file. We expect to see a buffer full of
+ // Byte.MAX_VALUE.
+ if (testBuffer.get(i) != Byte.MAX_VALUE) {
+ fail("Page 0 of closed file was not flushed properly when reclaiming the file slot of fileId 0 in the BufferCache.");
+ }
+ }
+ }
+ ioManager.close(testFileHandle);
+ bufferCache.closeFile(secondFileId);
+ if (deleteFile) {
+ bufferCache.deleteFile(secondFileId, false);
+ }
+ bufferCache.close();
+ }
- // Tests the behavior of the BufferCache when more than all pages are
- // pinned. We expect an exception.
- @Test
- public void testPinningAllPages() throws HyracksDataException {
- int numPages = 10;
- TestStorageManagerComponentHolder.init(PAGE_SIZE, numPages, 1);
+ // Tests the behavior of the BufferCache when more than all pages are
+ // pinned. We expect an exception.
+ @Test
+ public void testPinningAllPages() throws HyracksDataException {
+ int numPages = 10;
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, numPages, 1);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
- FileReference firstFileRef = new FileReference(new File(fileName));
- bufferCache.createFile(firstFileRef);
- int fileId = fmp.lookupFileId(firstFileRef);
- bufferCache.openFile(fileId);
+ FileReference firstFileRef = new FileReference(new File(fileName));
+ bufferCache.createFile(firstFileRef);
+ int fileId = fmp.lookupFileId(firstFileRef);
+ bufferCache.openFile(fileId);
- // Pin all pages.
- ICachedPage[] pages = new ICachedPage[numPages];
- for (int i = 0; i < numPages; ++i) {
- pages[i] = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, i), true);
- }
+ // Pin all pages.
+ ICachedPage[] pages = new ICachedPage[numPages];
+ for (int i = 0; i < numPages; ++i) {
+ pages[i] = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, i), true);
+ }
- // Try to pin another page. We expect a HyracksDataException.
- ICachedPage errorPage = null;
- try {
- errorPage = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, numPages), true);
- } catch (HyracksDataException e) {
- // This is the expected outcome.
- // The BufferCache should still be able to function properly.
- // Try unpinning all pages.
- for (int i = 0; i < numPages; ++i) {
- bufferCache.unpin(pages[i]);
- }
- // Now try pinning the page that failed above again.
- errorPage = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, numPages), true);
- // Unpin it.
- bufferCache.unpin(errorPage);
- // Cleanup.
- bufferCache.closeFile(fileId);
- bufferCache.close();
- return;
- } catch (Exception e) {
- fail("Expected a HyracksDataException when pinning more pages than available but got another exception: "
- + e.getMessage());
- }
- fail("Expected a HyracksDataException when pinning more pages than available.");
- }
+ // Try to pin another page. We expect a HyracksDataException.
+ ICachedPage errorPage = null;
+ try {
+ errorPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, numPages), true);
+ } catch (HyracksDataException e) {
+ // This is the expected outcome.
+ // The BufferCache should still be able to function properly.
+ // Try unpinning all pages.
+ for (int i = 0; i < numPages; ++i) {
+ bufferCache.unpin(pages[i]);
+ }
+ // Now try pinning the page that failed above again.
+ errorPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, numPages), true);
+ // Unpin it.
+ bufferCache.unpin(errorPage);
+ // Cleanup.
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ return;
+ } catch (Exception e) {
+ fail("Expected a HyracksDataException when pinning more pages than available but got another exception: "
+ + e.getMessage());
+ }
+ fail("Expected a HyracksDataException when pinning more pages than available.");
+ }
}