Recreate the changes from the hyracks_transactions_fix and hyracks_btree_updates branches in this branch, which was created off of hyracks_dev_next.
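
Summary of the recreated changes: insert, update, and delete in BTree now share a single insertUpdateOrDelete() descent, the frame-level findTupleIndex() takes a throwIfKeyExists flag, and duplicate keys are reported through the new BTreeDuplicateKeyException. The sketch below is illustrative only and is not part of this patch; it assumes an already-opened BTree and BTreeOpContext objects prepared by the caller for the corresponding IndexOp:

    import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
    import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
    import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeDuplicateKeyException;
    import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;

    // Hypothetical caller-side helper, shown only to illustrate the new API surface.
    public class BTreeUpsertSketch {
        public void insertOrUpdate(BTree btree, ITupleReference tuple,
                BTreeOpContext insertOpCtx, BTreeOpContext updateOpCtx) throws Exception {
            try {
                // Leaf frames now detect duplicates in findTupleIndex(..., true) and
                // throw BTreeDuplicateKeyException (a subclass of BTreeException).
                btree.insert(tuple, insertOpCtx);
            } catch (BTreeDuplicateKeyException e) {
                // update() is no longer a stub; like insert() and delete(), it goes
                // through insertUpdateOrDelete() and may trigger page splits.
                btree.update(tuple, updateOpCtx);
            }
        }
    }
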
git-svn-id: https://hyracks.googlecode.com/svn/branches/hyracks_btree_updates_next@587 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks-examples/btree-example/btreeapp/pom.xml b/hyracks-examples/btree-example/btreeapp/pom.xml
index e3f0590..737eaa0 100644
--- a/hyracks-examples/btree-example/btreeapp/pom.xml
+++ b/hyracks-examples/btree-example/btreeapp/pom.xml
@@ -11,6 +11,35 @@
</parent>
<build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.eclipse.m2e</groupId>
+ <artifactId>lifecycle-mapping</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <lifecycleMappingMetadata>
+ <pluginExecutions>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <versionRange>[1.0.0,)</versionRange>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore />
+ </action>
+ </pluginExecution>
+ </pluginExecutions>
+ </lifecycleMappingMetadata>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
diff --git a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml b/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
index 0d3ab7f..5fc56a1 100644
--- a/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
+++ b/hyracks-examples/hadoop-compat-example/hadoopcompatapp/pom.xml
@@ -11,6 +11,35 @@
</parent>
<build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.eclipse.m2e</groupId>
+ <artifactId>lifecycle-mapping</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <lifecycleMappingMetadata>
+ <pluginExecutions>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <versionRange>[1.0.0,)</versionRange>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore />
+ </action>
+ </pluginExecution>
+ </pluginExecutions>
+ </lifecycleMappingMetadata>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
diff --git a/hyracks-examples/text-example/textapp/pom.xml b/hyracks-examples/text-example/textapp/pom.xml
index 269ef2b..dd1b5ae 100644
--- a/hyracks-examples/text-example/textapp/pom.xml
+++ b/hyracks-examples/text-example/textapp/pom.xml
@@ -11,6 +11,35 @@
</parent>
<build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.eclipse.m2e</groupId>
+ <artifactId>lifecycle-mapping</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <lifecycleMappingMetadata>
+ <pluginExecutions>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <versionRange>[1.0.0,)</versionRange>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore />
+ </action>
+ </pluginExecution>
+ </pluginExecutions>
+ </lifecycleMappingMetadata>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
diff --git a/hyracks-examples/tpch-example/tpchapp/pom.xml b/hyracks-examples/tpch-example/tpchapp/pom.xml
index 5e11a20..3ea1e99 100644
--- a/hyracks-examples/tpch-example/tpchapp/pom.xml
+++ b/hyracks-examples/tpch-example/tpchapp/pom.xml
@@ -11,6 +11,35 @@
</parent>
<build>
+ <pluginManagement>
+ <plugins>
+ <plugin>
+ <groupId>org.eclipse.m2e</groupId>
+ <artifactId>lifecycle-mapping</artifactId>
+ <version>1.0.0</version>
+ <configuration>
+ <lifecycleMappingMetadata>
+ <pluginExecutions>
+ <pluginExecution>
+ <pluginExecutionFilter>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <versionRange>[1.0.0,)</versionRange>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ </pluginExecutionFilter>
+ <action>
+ <ignore />
+ </action>
+ </pluginExecution>
+ </pluginExecutions>
+ </lifecycleMappingMetadata>
+ </configuration>
+ </plugin>
+ </plugins>
+ </pluginManagement>
+
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
index bd4947c..f74e2ee 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrame.java
@@ -30,6 +30,7 @@
import edu.uci.ics.hyracks.storage.am.btree.api.IFrameCompressor;
import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
import edu.uci.ics.hyracks.storage.am.btree.compressors.FieldPrefixCompressor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeDuplicateKeyException;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixPrefixTupleReference;
import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
@@ -168,12 +169,12 @@
slotManager.setSlot(sortedTupleOffs.get(i).slotOff, slotManager.encodeSlotFields(prefixSlotNum, freeSpace));
freeSpace += tupleLength;
}
-
+
buf.putInt(freeSpaceOff, freeSpace);
int totalFreeSpace = buf.capacity() - buf.getInt(freeSpaceOff)
- ((buf.getInt(tupleCountOff) + buf.getInt(prefixTupleCountOff)) * slotManager.getSlotSize());
buf.putInt(totalFreeSpaceOff, totalFreeSpace);
-
+
return false;
}
@@ -192,9 +193,8 @@
frameTuple.setFieldCount(cmp.getFieldCount());
frameTuple.resetByTupleIndex(this, tupleIndex);
- int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount() - 1, cmp
- .getFieldCount()
- - cmp.getKeyFieldCount());
+ int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount() - 1,
+ cmp.getFieldCount() - cmp.getKeyFieldCount());
if (comparison != 0) {
throw new BTreeException("Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
}
@@ -233,16 +233,15 @@
int bytesRequired = tupleWriter.bytesRequired(tuple);
- // see if the tuple would fit uncompressed
+ // See if the tuple would fit uncompressed.
if (bytesRequired + slotManager.getSlotSize() <= freeContiguous)
return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
- // see if tuple would fit into remaining space after compaction
+ // See if tuple would fit into remaining space after compaction.
if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
return FrameOpSpaceStatus.SUFFICIENT_SPACE;
- // see if the tuple matches a prefix and will fit after truncating the
- // prefix
+ // See if the tuple matches a prefix and will fit after truncating the prefix.
int prefixSlotNum = slotManager.findPrefix(tuple, framePrefixTuple, cmp);
if (prefixSlotNum != FieldPrefixSlotManager.TUPLE_UNCOMPRESSED) {
int prefixSlotOff = slotManager.getPrefixSlotOff(prefixSlotNum);
@@ -259,7 +258,7 @@
}
@Override
- public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple, MultiComparator cmp) {
+ public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference tuple, int oldTupleIndex, MultiComparator cmp) {
// TODO Auto-generated method stub
return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
}
@@ -292,13 +291,26 @@
}
@Override
- public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
- return slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
- FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp, boolean throwIfKeyExists) throws Exception {
+ int slot = slotManager.findSlot(tuple, frameTuple, framePrefixTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ if (!throwIfKeyExists) {
+ return slot;
+ }
+ int tupleIndex = slotManager.decodeSecondSlotField(slot);
+ if (tupleIndex != FieldPrefixSlotManager.GREATEST_SLOT) {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ frameTuple.resetByTupleIndex(this, tupleIndex);
+ int comparison = cmp.fieldRangeCompare(tuple, frameTuple, 0, cmp.getKeyFieldCount());
+ if (comparison == 0) {
+ throw new BTreeDuplicateKeyException("Trying to insert duplicate key into leaf node.");
+ }
+ }
+ return slot;
}
-
+
@Override
- public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
int slot = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
int prefixSlotNum = slotManager.decodeFirstSlotField(slot);
int numPrefixFields = 0;
@@ -320,11 +332,11 @@
}
@Override
- public void update(int rid, ITupleReference tuple) throws Exception {
+ public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace) throws Exception {
// TODO Auto-generated method stub
}
-
+
@Override
public void printHeader() {
// TODO Auto-generated method stub
@@ -348,8 +360,8 @@
for (int i = 0; i < tupleCount; i++) {
frameTuple.resetByTupleIndex(this, i);
for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple
- .getFieldStart(j), frameTuple.getFieldLength(j));
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j), frameTuple.getFieldLength(j));
DataInput dataIn = new DataInputStream(inStream);
Object o = fields[j].deserialize(dataIn);
strBuilder.append(o.toString() + " ");
@@ -386,7 +398,7 @@
public boolean isLeaf() {
return buf.get(levelOff) == 0;
}
-
+
@Override
public boolean isInterior() {
return buf.get(levelOff) > 0;
@@ -461,7 +473,7 @@
BTreeFieldPrefixNSMLeafFrame rf = (BTreeFieldPrefixNSMLeafFrame) rightFrame;
frameTuple.setFieldCount(cmp.getFieldCount());
-
+
ByteBuffer right = rf.getBuffer();
int tupleCount = getTupleCount();
int prefixTupleCount = getPrefixTupleCount();
@@ -522,7 +534,7 @@
int bytesWritten = 0;
if (lastPrefixSlotNum != prefixSlotNum) {
- bytesWritten = tupleWriter.writeTuple(framePrefixTuple, right, freeSpace);
+ bytesWritten = tupleWriter.writeTuple(framePrefixTuple, right.array(), freeSpace);
int newPrefixSlot = rf.slotManager
.encodeSlotFields(framePrefixTuple.getFieldCount(), freeSpace);
int prefixSlotOff = rf.slotManager.getPrefixSlotOff(prefixSlotNum);
@@ -571,7 +583,8 @@
rightFrame.compact(cmp);
// insert last key
- int targetTupleIndex = targetFrame.findTupleIndex(tuple, cmp);
+ // TODO: can we avoid checking for duplicates here?
+ int targetTupleIndex = targetFrame.findTupleIndex(tuple, cmp, true);
targetFrame.insert(tuple, cmp, targetTupleIndex);
// set split key to be highest value in left page
@@ -658,7 +671,7 @@
}
@Override
- public int getPageHeaderSize() {
- return nextLeafOff;
- }
+ public int getPageHeaderSize() {
+ return nextLeafOff;
+ }
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
index 05b43d3..70ac13a 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeFieldPrefixNSMLeafFrameFactory.java
@@ -22,7 +22,7 @@
public class BTreeFieldPrefixNSMLeafFrameFactory implements ITreeIndexFrameFactory {
private static final long serialVersionUID = 1L;
- private ITreeIndexTupleWriterFactory tupleWriterFactory;
+ private final ITreeIndexTupleWriterFactory tupleWriterFactory;
public BTreeFieldPrefixNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
this.tupleWriterFactory = tupleWriterFactory;
@@ -32,4 +32,9 @@
public IBTreeLeafFrame createFrame() {
return new BTreeFieldPrefixNSMLeafFrame(tupleWriterFactory.createTupleWriter());
}
+
+ @Override
+ public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+ return tupleWriterFactory;
+ }
}
\ No newline at end of file
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
index 6025003..0d56cb0 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrame.java
@@ -26,7 +26,7 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeDuplicateKeyException;
import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
@@ -44,7 +44,7 @@
private static final int rightLeafOff = smFlagOff + 1;
private static final int childPtrSize = 4;
-
+
// private SimpleTupleReference cmpFrameTuple = new SimpleTupleReference();
private ITreeIndexTupleReference cmpFrameTuple;
@@ -77,13 +77,17 @@
return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
}
- public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp, boolean throwIfKeyExists) throws Exception {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ if (!throwIfKeyExists) {
+ return tupleIndex;
+ }
int slotOff = slotManager.getSlotOff(tupleIndex);
boolean isDuplicate = true;
+ // TODO: We may not need to check for duplicates in interior nodes.
if (tupleIndex < 0)
isDuplicate = false; // greater than all existing keys
else {
@@ -92,45 +96,44 @@
isDuplicate = false;
}
if (isDuplicate) {
- throw new BTreeException("Trying to insert duplicate value into interior node.");
+ throw new BTreeDuplicateKeyException("Trying to insert duplicate key into interior node.");
}
return tupleIndex;
}
-
+
@Override
public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
- int slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
- int freeSpace = buf.getInt(freeSpaceOff);
- int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
- System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp), buf
- .array(), freeSpace + bytesWritten, childPtrSize);
- int tupleSize = bytesWritten + childPtrSize;
+ int slotOff = slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+ int freeSpace = buf.getInt(freeSpaceOff);
+ int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
+ System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp), buf.array(),
+ freeSpace + bytesWritten, childPtrSize);
+ int tupleSize = bytesWritten + childPtrSize;
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
- // did insert into the rightmost slot?
- if (slotOff == slotManager.getSlotEndOff()) {
- System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp)
- + childPtrSize, buf.array(), rightLeafOff, childPtrSize);
- } else {
- // if slotOff has a right (slot-)neighbor then update its child
- // pointer
- // the only time when this is NOT the case, is when this is the
- // first tuple
- // (or when the splitkey goes into the rightmost slot but that
- // case was handled in the if above)
- if (buf.getInt(tupleCountOff) > 1) {
- int rightNeighborOff = slotOff - slotManager.getSlotSize();
- frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(rightNeighborOff));
- System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple, cmp) + childPtrSize,
- buf.array(), getLeftChildPageOff(frameTuple, cmp), childPtrSize);
- }
- }
+ // did insert into the rightmost slot?
+ if (slotOff == slotManager.getSlotEndOff()) {
+ System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getLeftChildPageOff(tuple, cmp)
+ + childPtrSize, buf.array(), rightLeafOff, childPtrSize);
+ } else {
+ // If slotOff has a right (slot-)neighbor, then update its
+ // child pointer.
+ // The only time this is NOT the case is when this is
+ // the first tuple
+ // (or when the split key goes into the rightmost slot, but
+ // that case was handled in the if above).
+ if (buf.getInt(tupleCountOff) > 1) {
+ int rightNeighborOff = slotOff - slotManager.getSlotSize();
+ frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(rightNeighborOff));
+ System.arraycopy(tuple.getFieldData(0), getLeftChildPageOff(tuple, cmp) + childPtrSize, buf.array(),
+ getLeftChildPageOff(frameTuple, cmp), childPtrSize);
+ }
+ }
}
-
@Override
public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
int freeSpace = buf.getInt(freeSpaceOff);
@@ -151,7 +154,7 @@
throws Exception {
// before doing anything check if key already exists
frameTuple.setFieldCount(cmp.getKeyFieldCount());
-
+
ByteBuffer right = rightFrame.getBuffer();
int tupleCount = buf.getInt(tupleCountOff);
@@ -203,9 +206,10 @@
compact(cmp);
// insert key
- int targetTupleIndex = targetFrame.findTupleIndex(savedSplitKey.getTuple(), cmp);
+ // TODO: can we avoid checking for duplicates here?
+ int targetTupleIndex = targetFrame.findTupleIndex(savedSplitKey.getTuple(), cmp, true);
targetFrame.insert(savedSplitKey.getTuple(), cmp, targetTupleIndex);
-
+
return 0;
}
@@ -242,7 +246,7 @@
buf.putInt(freeSpaceOff, freeSpace);
buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
-
+
return false;
}
@@ -316,7 +320,7 @@
}
slotOff += slotManager.getSlotSize();
}
-
+
frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(slotOff));
int childPageOff = getLeftChildPageOff(frameTuple, srcCmp);
return buf.getInt(childPageOff);
@@ -351,8 +355,8 @@
// maintain space information
buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize
- + slotManager.getSlotSize());
+ buf.putInt(totalFreeSpaceOff,
+ buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize + slotManager.getSlotSize());
}
@Override
@@ -388,8 +392,10 @@
for (int i = 0; i < tupleCount; i++) {
int tupleOff = slotManager.getTupleOff(slotManager.getSlotOff(i));
frameTuple.resetByTupleOffset(buf, tupleOff);
- int intVal = getInt(buf.array(), frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
- + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
+ int intVal = getInt(
+ buf.array(),
+ frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1));
ret.add(intVal);
}
if (!isLeaf()) {
@@ -411,8 +417,8 @@
// maintain space information
buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize
- + slotManager.getSlotSize());
+ buf.putInt(totalFreeSpaceOff,
+ buf.getInt(totalFreeSpaceOff) + keySize + childPtrSize + slotManager.getSlotSize());
int freeSpace = buf.getInt(freeSpaceOff);
if (freeSpace == tupleOff + keySize + childPtrSize) {
@@ -433,8 +439,8 @@
for (int i = 0; i < tupleCount; i++) {
frameTuple.resetByTupleIndex(this, i);
for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple
- .getFieldStart(j), frameTuple.getFieldLength(j));
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j), frameTuple.getFieldLength(j));
DataInput dataIn = new DataInputStream(inStream);
Object o = fields[j].deserialize(dataIn);
strBuilder.append(o.toString() + " ");
@@ -444,9 +450,9 @@
strBuilder.append("\n");
return strBuilder.toString();
}
-
+
@Override
- public int getPageHeaderSize() {
- return rightLeafOff;
- }
+ public int getPageHeaderSize() {
+ return rightLeafOff;
+ }
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
index 6b30ee0..8618df8 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMInteriorFrameFactory.java
@@ -22,7 +22,7 @@
public class BTreeNSMInteriorFrameFactory implements ITreeIndexFrameFactory {
private static final long serialVersionUID = 1L;
- private ITreeIndexTupleWriterFactory tupleWriterFactory;
+ private final ITreeIndexTupleWriterFactory tupleWriterFactory;
public BTreeNSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
this.tupleWriterFactory = tupleWriterFactory;
@@ -32,4 +32,9 @@
public IBTreeInteriorFrame createFrame() {
return new BTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter());
}
+
+ @Override
+ public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+ return tupleWriterFactory;
+ }
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
index 85fbec9..0ffc400 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrame.java
@@ -20,7 +20,7 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeDuplicateKeyException;
import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
@@ -66,42 +66,46 @@
}
@Override
- public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
- frameTuple.setFieldCount(cmp.getFieldCount());
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp, boolean throwIfKeyExists) throws Exception {
+ frameTuple.setFieldCount(cmp.getFieldCount());
int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ if (!throwIfKeyExists) {
+ return tupleIndex;
+ }
int slotOff = slotManager.getSlotOff(tupleIndex);
boolean isDuplicate = true;
-
- if (tupleIndex < 0)
- isDuplicate = false; // greater than all existing keys
+ if (tupleIndex < 0) {
+ // Greater than all existing keys.
+ isDuplicate = false;
+ }
else {
frameTuple.resetByTupleOffset(buf, slotManager.getTupleOff(slotOff));
- if (cmp.compare(tuple, frameTuple) != 0)
+ if (cmp.compare(tuple, frameTuple) != 0) {
isDuplicate = false;
+ }
}
if (isDuplicate) {
- throw new BTreeException("Trying to insert duplicate value into leaf of unique index");
+ throw new BTreeDuplicateKeyException("Trying to insert duplicate key into leaf node.");
}
-
return tupleIndex;
}
-
+
@Override
- public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
- slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
- int freeSpace = buf.getInt(freeSpaceOff);
- int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
+ slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+ int freeSpace = buf.getInt(freeSpaceOff);
+ int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), freeSpace);
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
}
@Override
public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
int freeSpace = buf.getInt(freeSpaceOff);
slotManager.insertSlot(-1, freeSpace);
- int bytesWritten = tupleWriter.writeTuple(tuple, buf, freeSpace);
+ int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), freeSpace);
buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
@@ -112,7 +116,7 @@
throws Exception {
frameTuple.setFieldCount(cmp.getFieldCount());
-
+
ByteBuffer right = rightFrame.getBuffer();
int tupleCount = getTupleCount();
@@ -149,7 +153,8 @@
compact(cmp);
// insert last key
- int targetTupleIndex = targetFrame.findTupleIndex(tuple, cmp);
+ // TODO: can we avoid checking for duplicates here?
+ int targetTupleIndex = targetFrame.findTupleIndex(tuple, cmp, true);
targetFrame.insert(tuple, cmp, targetTupleIndex);
// set split key to be highest value in left page
@@ -160,7 +165,7 @@
splitKey.initData(splitKeySize);
tupleWriter.writeTupleFields(frameTuple, 0, cmp.getKeyFieldCount(), splitKey.getBuffer(), 0);
splitKey.getTuple().resetByTupleOffset(splitKey.getBuffer(), 0);
-
+
return 0;
}
@@ -181,8 +186,8 @@
return slotManager.findTupleIndex(searchKey, pageTuple, cmp, ftm, ftp);
}
- @Override
- public int getPageHeaderSize() {
- return nextLeafOff;
- }
+ @Override
+ public int getPageHeaderSize() {
+ return nextLeafOff;
+ }
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
index d59b391..a56390b 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/frames/BTreeNSMLeafFrameFactory.java
@@ -22,7 +22,7 @@
public class BTreeNSMLeafFrameFactory implements ITreeIndexFrameFactory {
private static final long serialVersionUID = 1L;
- private ITreeIndexTupleWriterFactory tupleWriterFactory;
+ private final ITreeIndexTupleWriterFactory tupleWriterFactory;
public BTreeNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory) {
this.tupleWriterFactory = tupleWriterFactory;
@@ -32,4 +32,9 @@
public IBTreeLeafFrame createFrame() {
return new BTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter());
}
+
+ @Override
+ public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+ return tupleWriterFactory;
+ }
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
index 1869a1b..c1fc5ba 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
@@ -51,7 +51,7 @@
private final static int RESTART_OP = Integer.MIN_VALUE;
private final static int MAX_RESTARTS = 10;
- // the root page never changes
+ // Fixed root page.
private final int rootPage = 1;
private final IFreePageManager freePageManager;
@@ -260,23 +260,19 @@
ctx.pred.setLowKeyComparator(cmp);
if (ctx.pred.getHighKeyComparator() == null)
ctx.pred.setHighKeyComparator(cmp);
-
- boolean repeatOp = true;
// we use this loop to deal with possibly multiple operation restarts
// due to ongoing structure modifications during the descent
+ boolean repeatOp = true;
while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
performOp(rootPage, null, ctx);
-
// if we reach this stage then we need to restart from the (possibly
// new) root
if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
ctx.pageLsns.removeLast(); // pop the restart op indicator
continue;
}
-
repeatOp = false;
}
-
cursor.setBufferCache(bufferCache);
cursor.setFileId(fileId);
}
@@ -308,6 +304,23 @@
ctx.interiorFrame.setPage(originalPage);
}
+ private void reinitRoot(BTreeOpContext ctx) throws HyracksDataException {
+ ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);
+ pins++;
+ rootNode.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ ctx.leafFrame.setPage(rootNode);
+ ctx.leafFrame.initBuffer((byte) 0);
+ currentLevel = 0; // debug
+ } finally {
+ rootNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(rootNode);
+ unpins++;
+ }
+ }
+
private void createNewRoot(BTreeOpContext ctx) throws Exception {
rootSplits++; // debug
splitsByLevel[currentLevel]++;
@@ -354,7 +367,7 @@
ctx.interiorFrame.setSmFlag(true); // will be cleared later
// in unsetSmPages
ctx.splitKey.setLeftPage(newLeftId);
- int targetTupleIndex = ctx.interiorFrame.findTupleIndex(ctx.splitKey.getTuple(), cmp);
+ int targetTupleIndex = ctx.interiorFrame.findTupleIndex(ctx.splitKey.getTuple(), cmp, false);
ctx.interiorFrame.insert(ctx.splitKey.getTuple(), cmp, targetTupleIndex);
} finally {
newLeftNode.releaseWriteLatch();
@@ -375,9 +388,8 @@
unpins++;
}
}
-
- @Override
- public void insert(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+
+ private void insertUpdateOrDelete(ITupleReference tuple, IndexOpContext ictx) throws Exception {
BTreeOpContext ctx = (BTreeOpContext) ictx;
ctx.reset();
ctx.pred.setLowKeyComparator(cmp);
@@ -385,171 +397,231 @@
ctx.pred.setLowKey(tuple, true);
ctx.pred.setHighKey(tuple, true);
ctx.splitKey.reset();
- ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
-
+ ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
+ // We use this loop to deal with possibly multiple operation restarts
+ // due to ongoing structure modifications during the descent.
boolean repeatOp = true;
- // we use this loop to deal with possibly multiple operation restarts
- // due to ongoing structure modifications during the descent
while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
performOp(rootPage, null, ctx);
-
- // if we reach this stage then we need to restart from the (possibly
- // new) root
+ // Do we need to restart from the (possibly new) root?
if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
ctx.pageLsns.removeLast(); // pop the restart op indicator
continue;
}
-
- // we split the root, here is the key for a new root
+ // Split key propagated?
if (ctx.splitKey.getBuffer() != null) {
- createNewRoot(ctx);
+ if (ctx.op == IndexOp.DELETE) {
+ // Reset level of root to zero.
+ reinitRoot(ctx);
+ } else {
+ // Insert or update op. Create a new root.
+ createNewRoot(ctx);
+ }
}
-
unsetSmPages(ctx);
-
+ if (ctx.op == IndexOp.DELETE) {
+ addFreePages(ctx);
+ }
repeatOp = false;
}
}
+
+ @Override
+ public void insert(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ insertUpdateOrDelete(tuple, ictx);
+ }
+ @Override
+ public void update(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ insertUpdateOrDelete(tuple, ictx);
+ }
+
+ @Override
+ public void delete(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ insertUpdateOrDelete(tuple, ictx);
+ }
+
public long uselessCompressionTime = 0;
private void insertLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
ctx.leafFrame.setPage(node);
ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
-
- int targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp);
+ int targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp, true);
FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
switch (spaceStatus) {
-
case SUFFICIENT_CONTIGUOUS_SPACE: {
// System.out.println("SUFFICIENT_CONTIGUOUS_SPACE");
ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
ctx.splitKey.reset();
- }
break;
-
+ }
case SUFFICIENT_SPACE: {
// System.out.println("SUFFICIENT_SPACE");
boolean slotsChanged = ctx.leafFrame.compact(cmp);
- if (slotsChanged)
- targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp);
+ if (slotsChanged) {
+ targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp, true);
+ }
ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
ctx.splitKey.reset();
- }
break;
-
+ }
case INSUFFICIENT_SPACE: {
// System.out.println("INSUFFICIENT_SPACE");
-
- // try compressing the page first and see if there is space
- // available
+ // Try compressing the page first and see if there is space available.
long start = System.currentTimeMillis();
boolean reCompressed = ctx.leafFrame.compress(cmp);
long end = System.currentTimeMillis();
- if (reCompressed)
+ if (reCompressed) {
spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
-
+ }
if (spaceStatus == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
ctx.splitKey.reset();
-
usefulCompression++;
} else {
-
uselessCompressionTime += (end - start);
uselessCompression++;
+ performLeafSplit(pageId, tuple, ctx);
+ }
+ break;
+ }
+ }
+ node.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(node);
+ unpins++;
+ }
+
+ private void performLeafSplit(int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+ splitsByLevel[0]++; // debug
+ int rightSiblingPageId = ctx.leafFrame.getNextLeaf();
+ ICachedPage rightSibling = null;
+ if (rightSiblingPageId > 0) {
+ rightSibling = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightSiblingPageId),
+ false);
+ pins++;
+ }
+ // Lock is released in unsetSmPages(), after sm has fully completed.
+ treeLatch.writeLock().lock();
+ treeLatchesAcquired++;
+ try {
- // perform split
- splitsByLevel[0]++; // debug
- int rightSiblingPageId = ctx.leafFrame.getNextLeaf();
- ICachedPage rightSibling = null;
- if (rightSiblingPageId > 0) {
- rightSibling = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightSiblingPageId),
- false);
- pins++;
- }
+ int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
+ ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId),
+ true);
+ pins++;
+ rightNode.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ IBTreeLeafFrame rightFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ rightFrame.setPage(rightNode);
+ rightFrame.initBuffer((byte) 0);
+ rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
- treeLatch.writeLock().lock(); // lock is released in
- // unsetSmPages(), after sm has
- // fully completed
- treeLatchesAcquired++;
+ int ret = ctx.leafFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
+
+ ctx.smPages.add(pageId);
+ ctx.smPages.add(rightPageId);
+ ctx.leafFrame.setSmFlag(true);
+ rightFrame.setSmFlag(true);
+
+ rightFrame.setNextLeaf(ctx.leafFrame.getNextLeaf());
+ rightFrame.setPrevLeaf(pageId);
+ ctx.leafFrame.setNextLeaf(rightPageId);
+
+ // TODO: We just use increasing numbers as the
+ // pageLsn; this should be tied together with
+ // the LogManager and the
+ // TransactionManager.
+ rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
+ ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
+
+ if (ret != 0) {
+ ctx.splitKey.reset();
+ } else {
+ // System.out.print("LEAF SPLITKEY: ");
+ // cmp.printKey(splitKey.getData(), 0);
+ // System.out.println("");
+
+ ctx.splitKey.setPages(pageId, rightPageId);
+ }
+ if (rightSibling != null) {
+ rightSibling.acquireWriteLatch();
+ writeLatchesAcquired++;
try {
-
- int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
- ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId),
- true);
- pins++;
- rightNode.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- IBTreeLeafFrame rightFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
- rightFrame.setPage(rightNode);
- rightFrame.initBuffer((byte) 0);
- rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
-
- int ret = ctx.leafFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
-
- ctx.smPages.add(pageId);
- ctx.smPages.add(rightPageId);
- ctx.leafFrame.setSmFlag(true);
- rightFrame.setSmFlag(true);
-
- rightFrame.setNextLeaf(ctx.leafFrame.getNextLeaf());
- rightFrame.setPrevLeaf(pageId);
- ctx.leafFrame.setNextLeaf(rightPageId);
-
- // TODO: we just use increasing numbers as pageLsn,
- // we
- // should tie this together with the LogManager and
- // TransactionManager
- rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
- ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
-
- if (ret != 0) {
- ctx.splitKey.reset();
- } else {
- // System.out.print("LEAF SPLITKEY: ");
- // cmp.printKey(splitKey.getData(), 0);
- // System.out.println("");
-
- ctx.splitKey.setPages(pageId, rightPageId);
- }
- if (rightSibling != null) {
- rightSibling.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- rightFrame.setPage(rightSibling); // reuse
- // rightFrame
- // for
- // modification
- rightFrame.setPrevLeaf(rightPageId);
- } finally {
- rightSibling.releaseWriteLatch();
- writeLatchesReleased++;
- }
- }
- } finally {
- rightNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(rightNode);
- unpins++;
- }
- } catch (Exception e) {
- treeLatch.writeLock().unlock();
- treeLatchesReleased++;
- throw e;
+ // Reuse rightFrame to modify
+ // the right sibling's
+ // prevLeaf pointer.
+ rightFrame.setPage(rightSibling);
+ rightFrame.setPrevLeaf(rightPageId);
} finally {
- if (rightSibling != null) {
- bufferCache.unpin(rightSibling);
- unpins++;
- }
+ rightSibling.releaseWriteLatch();
+ writeLatchesReleased++;
}
}
+ } finally {
+ rightNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(rightNode);
+ unpins++;
}
- break;
-
+ } catch (Exception e) {
+ treeLatch.writeLock().unlock();
+ treeLatchesReleased++;
+ throw e;
+ } finally {
+ if (rightSibling != null) {
+ bufferCache.unpin(rightSibling);
+ unpins++;
+ }
}
-
+ }
+
+ private void updateLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+ ctx.leafFrame.setPage(node);
+ ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
+ int oldTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp, false);
+ FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceUpdate(tuple, oldTupleIndex, cmp);
+ switch (spaceStatus) {
+ case SUFFICIENT_INPLACE_SPACE: {
+ //System.out.println("SUFFICIENT_INPLACE_SPACE");
+ ctx.leafFrame.update(tuple, oldTupleIndex, true);
+ ctx.splitKey.reset();
+ break;
+ }
+ case SUFFICIENT_CONTIGUOUS_SPACE: {
+ //System.out.println("SUFFICIENT_CONTIGUOUS_SPACE");
+ ctx.leafFrame.update(tuple, oldTupleIndex, false);
+ ctx.splitKey.reset();
+ break;
+ }
+ case SUFFICIENT_SPACE: {
+ //System.out.println("SUFFICIENT_SPACE");
+ // Delete the old tuple, compact the frame, and insert the new tuple.
+ ctx.leafFrame.delete(tuple, cmp, false);
+ ctx.leafFrame.compact(cmp);
+ int targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp, false);
+ ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
+ ctx.splitKey.reset();
+ break;
+ }
+ case INSUFFICIENT_SPACE: {
+ //System.out.println("INSUFFICIENT_SPACE");
+ // Delete the old tuple, and try compressing the page to make space available.
+ ctx.leafFrame.delete(tuple, cmp, false);
+ ctx.leafFrame.compress(cmp);
+ // We need to insert the new tuple, so check if there is space.
+ spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
+ if (spaceStatus == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
+ int targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp, false);
+ ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
+ ctx.splitKey.reset();
+ } else {
+ performLeafSplit(pageId, tuple, ctx);
+ }
+ break;
+ }
+ }
node.releaseWriteLatch();
writeLatchesReleased++;
bufferCache.unpin(node);
@@ -560,8 +632,7 @@
throws Exception {
ctx.interiorFrame.setPage(node);
ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
-
- int targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp);
+ int targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp, false);
FrameOpSpaceStatus spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
switch (spaceStatus) {
case INSUFFICIENT_SPACE: {
@@ -618,8 +689,9 @@
case SUFFICIENT_SPACE: {
boolean slotsChanged = ctx.interiorFrame.compact(cmp);
+ // TODO: do we really need to check for duplicates here?
if (slotsChanged)
- targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp);
+ targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp, true);
ctx.interiorFrame.insert(tuple, cmp, targetTupleIndex);
ctx.splitKey.reset();
}
@@ -628,56 +700,6 @@
}
}
- @Override
- public void delete(ITupleReference tuple, IndexOpContext ictx) throws Exception {
- BTreeOpContext ctx = (BTreeOpContext) ictx;
- ctx.reset();
- ctx.pred.setLowKeyComparator(cmp);
- ctx.pred.setHighKeyComparator(cmp);
- ctx.pred.setLowKey(tuple, true);
- ctx.pred.setHighKey(tuple, true);
- ctx.splitKey.reset();
- ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
-
- boolean repeatOp = true;
- // we use this loop to deal with possibly multiple operation restarts
- // due to ongoing structure modifications during the descent
- while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
- performOp(rootPage, null, ctx);
-
- // if we reach this stage then we need to restart from the (possibly
- // new) root
- if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
- ctx.pageLsns.removeLast(); // pop the restart op indicator
- continue;
- }
-
- // tree is empty, reset level to zero
- if (ctx.splitKey.getBuffer() != null) {
- ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);
- pins++;
- rootNode.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- ctx.leafFrame.setPage(rootNode);
- ctx.leafFrame.initBuffer((byte) 0);
- currentLevel = 0; // debug
- } finally {
- rootNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(rootNode);
- unpins++;
- }
- }
-
- unsetSmPages(ctx);
-
- addFreePages(ctx);
-
- repeatOp = false;
- }
- }
-
// TODO: to avoid latch deadlock, must modify cursor to detect empty leaves
private void deleteLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
ctx.leafFrame.setPage(node);
@@ -817,7 +839,7 @@
}
private final void acquireLatch(ICachedPage node, IndexOp op, boolean isLeaf) {
- if (isLeaf && (op.equals(IndexOp.INSERT) || op.equals(IndexOp.DELETE))) {
+ if (isLeaf && (op.equals(IndexOp.INSERT) || op.equals(IndexOp.DELETE) || op.equals(IndexOp.UPDATE))) {
node.acquireWriteLatch();
writeLatchesAcquired++;
} else {
@@ -827,7 +849,7 @@
}
private final void releaseLatch(ICachedPage node, IndexOp op, boolean isLeaf) {
- if (isLeaf && (op.equals(IndexOp.INSERT) || op.equals(IndexOp.DELETE))) {
+ if (isLeaf && (op.equals(IndexOp.INSERT) || op.equals(IndexOp.DELETE) || op.equals(IndexOp.UPDATE))) {
node.releaseWriteLatch();
writeLatchesReleased++;
} else {
@@ -870,7 +892,6 @@
// remember trail of pageLsns, to unwind recursion in case of an ongoing
// structure modification
ctx.pageLsns.add(ctx.interiorFrame.getPageLsn());
-
try {
// latch coupling, note: parent should never be write latched,
@@ -884,68 +905,48 @@
if (!isLeaf || smFlag) {
if (!smFlag) {
- // we use this loop to deal with possibly multiple operation
+ // We use this loop to deal with possibly multiple operation
// restarts due to ongoing structure modifications during
- // the descent
+ // the descent.
boolean repeatOp = true;
while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
int childPageId = ctx.interiorFrame.getChildPageId(ctx.pred, cmp);
performOp(childPageId, node, ctx);
if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
- ctx.pageLsns.removeLast(); // pop the restart op
- // indicator
+ // Pop the restart op indicator.
+ ctx.pageLsns.removeLast();
if (isConsistent(pageId, ctx)) {
- node = null; // to avoid unpinning and
- // unlatching node again in
- // recursive call
- continue; // descend the tree again
+ // Don't unpin and unlatch node again in recursive call.
+ node = null;
+ // Descend the tree again.
+ continue;
} else {
- ctx.pageLsns.removeLast(); // pop pageLsn of
- // this page
- // (version seen by this op
- // during descent)
- ctx.pageLsns.add(RESTART_OP); // this node is
- // not
- // consistent,
- // set the
- // restart
- // indicator for
- // upper level
+ // Pop pageLsn of this page (version seen by this op during descent).
+ ctx.pageLsns.removeLast();
+ // This node is not consistent, so set the restart indicator for the upper level.
+ ctx.pageLsns.add(RESTART_OP);
break;
}
}
-
+
switch (ctx.op) {
-
- case INSERT: {
- if (ctx.splitKey.getBuffer() != null) {
- node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
- pins++;
- node.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- insertInterior(node, pageId, ctx.splitKey.getTuple(), ctx);
- } finally {
- node.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(node);
- unpins++;
- }
- } else {
- unsetSmPages(ctx);
- }
- }
- break;
-
+ case INSERT:
+ case UPDATE:
case DELETE: {
+ // Is there a propagated split key?
if (ctx.splitKey.getBuffer() != null) {
node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
pins++;
node.acquireWriteLatch();
writeLatchesAcquired++;
try {
- deleteInterior(node, pageId, ctx.pred.getLowKey(), ctx);
+ if (ctx.op == IndexOp.DELETE) {
+ deleteInterior(node, pageId, ctx.pred.getLowKey(), ctx);
+ } else {
+ // Insert or update op. Both can cause split keys to propagate upwards.
+ insertInterior(node, pageId, ctx.splitKey.getTuple(), ctx);
+ }
} finally {
node.releaseWriteLatch();
writeLatchesReleased++;
@@ -955,18 +956,15 @@
} else {
unsetSmPages(ctx);
}
- }
break;
-
- case SEARCH: {
- // do nothing
}
+ default: {
+ // Do nothing for SEARCH and DISKORDERSCAN.
break;
-
+ }
}
-
- repeatOp = false; // operation completed
-
+ // Operation completed.
+ repeatOp = false;
} // end while
} else { // smFlag
ctx.opRestarts++;
@@ -995,19 +993,21 @@
switch (ctx.op) {
case INSERT: {
insertLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
- }
break;
-
+ }
+ case UPDATE: {
+ updateLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
+ break;
+ }
case DELETE: {
deleteLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
- }
break;
-
+ }
case SEARCH: {
ctx.cursorInitialState.setPage(node);
ctx.cursor.open(ctx.cursorInitialState, ctx.pred);
- }
break;
+ }
}
}
} catch (TreeIndexException e) {
@@ -1271,12 +1271,7 @@
public int getRootPageId() {
return rootPage;
- }
-
- @Override
- public void update(ITupleReference tuple, IndexOpContext ictx) throws Exception {
- throw new Exception("BTree Update not implemented.");
- }
+ }
@Override
public int getFieldCount() {
@@ -1287,4 +1282,4 @@
public IndexType getIndexType() {
return IndexType.BTREE;
}
-}
\ No newline at end of file
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeDuplicateKeyException.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeDuplicateKeyException.java
new file mode 100644
index 0000000..1aa7fff
--- /dev/null
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeDuplicateKeyException.java
@@ -0,0 +1,13 @@
+package edu.uci.ics.hyracks.storage.am.btree.impls;
+
+public class BTreeDuplicateKeyException extends BTreeException {
+ private static final long serialVersionUID = 1L;
+
+ public BTreeDuplicateKeyException(Exception e) {
+ super(e);
+ }
+
+ public BTreeDuplicateKeyException(String message) {
+ super(message);
+ }
+}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java
index 8019bcb..1658989 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeException.java
@@ -19,8 +19,8 @@
public class BTreeException extends TreeIndexException {
- private static final long serialVersionUID = 1L;
- private boolean handled = false;
+ protected static final long serialVersionUID = 1L;
+ protected boolean handled = false;
public BTreeException(Exception e) {
super(e);
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
index 6d65b14..bacd16c 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
@@ -43,18 +43,18 @@
this.leafFrame = leafFrame;
this.interiorFrame = interiorFrame;
this.metaFrame = metaFrame;
-
pageLsns = new IntArrayList(treeHeightHint, treeHeightHint);
- if (op != IndexOp.SEARCH && op != IndexOp.DISKORDERSCAN) {
+ if (op == IndexOp.SEARCH || op == IndexOp.DISKORDERSCAN) {
+ smPages = null;
+ freePages = null;
+ splitKey = null;
+ cursorInitialState = new BTreeCursorInitialState(null);
+ } else {
+ // Insert, update or delete operation.
smPages = new IntArrayList(treeHeightHint, treeHeightHint);
freePages = new IntArrayList(treeHeightHint, treeHeightHint);
pred = new RangePredicate(true, null, null, true, true, null, null);
splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
- } else {
- smPages = null;
- freePages = null;
- splitKey = null;
- cursorInitialState = new BTreeCursorInitialState(null);
}
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
index 9a655fc..a025d57 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/FieldPrefixTupleReference.java
@@ -90,4 +90,10 @@
public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset) {
frame = null;
}
+
+ @Override
+ public int getTupleSize() {
+ // TODO Auto-generated method stub
+ return 0;
+ }
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
index db458ec..55ec352 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
@@ -25,86 +25,80 @@
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
public interface ITreeIndexFrame {
- public void setPage(ICachedPage page);
+ public void setPage(ICachedPage page);
- public ICachedPage getPage();
+ public ICachedPage getPage();
- public ByteBuffer getBuffer();
+ public ByteBuffer getBuffer();
- public int findTupleIndex(ITupleReference tuple, MultiComparator cmp)
- throws Exception;
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp, boolean throwIfKeyExists) throws Exception;
- public void insert(ITupleReference tuple, MultiComparator cmp,
- int tupleIndex) throws Exception;
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception;
- public void update(int rid, ITupleReference tuple) throws Exception;
+ public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace) throws Exception;
- public void delete(ITupleReference tuple, MultiComparator cmp,
- boolean exactDelete) throws Exception;
+ public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception;
- // returns true if slots were modified, false otherwise
- public boolean compact(MultiComparator cmp);
+ // returns true if slots were modified, false otherwise
+ public boolean compact(MultiComparator cmp);
- public boolean compress(MultiComparator cmp) throws HyracksDataException;
+ public boolean compress(MultiComparator cmp) throws HyracksDataException;
- public void initBuffer(byte level);
+ public void initBuffer(byte level);
- public int getTupleCount();
+ public int getTupleCount();
- // assumption: page must be write-latched at this point
- public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple,
- MultiComparator cmp);
+ // assumption: page must be write-latched at this point
+ public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp);
- public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple,
- MultiComparator cmp);
+ public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference newTuple, int oldTupleIndex, MultiComparator cmp);
- public int getTupleOffset(int slotNum);
+ public int getTupleOffset(int slotNum);
- public int getTotalFreeSpace();
+ public int getTotalFreeSpace();
- public void setPageLsn(int pageLsn);
+ public void setPageLsn(int pageLsn);
- public int getPageLsn();
+ public int getPageLsn();
- // for debugging
- public void printHeader();
+ // for debugging
+ public void printHeader();
- public String printKeys(MultiComparator cmp,
- ISerializerDeserializer[] fields) throws HyracksDataException;
+ public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException;
- // TODO; what if tuples more than half-page size?
- public int split(ITreeIndexFrame rightFrame, ITupleReference tuple,
- MultiComparator cmp, ISplitKey splitKey) throws Exception;
+ // TODO: what if tuples are larger than half a page?
+ public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
+ throws Exception;
- public ISlotManager getSlotManager();
+ public ISlotManager getSlotManager();
- // ATTENTION: in b-tree operations it may not always be possible to
- // determine whether an ICachedPage is a leaf or interior node
- // a compatible interior and leaf implementation MUST return identical
- // values when given the same ByteBuffer for the functions below
- public boolean isLeaf();
+ // ATTENTION: in B-tree operations it may not always be possible to
+ // determine whether an ICachedPage is a leaf or an interior node;
+ // compatible interior and leaf implementations MUST return identical
+ // values when given the same ByteBuffer for the functions below.
+ public boolean isLeaf();
- public boolean isInterior();
+ public boolean isInterior();
- public byte getLevel();
+ public byte getLevel();
- public void setLevel(byte level);
+ public void setLevel(byte level);
- public boolean getSmFlag(); // structure modification flag
+ public boolean getSmFlag(); // structure modification flag
- public void setSmFlag(boolean smFlag);
+ public void setSmFlag(boolean smFlag);
- public int getSlotSize();
+ public int getSlotSize();
- // TODO: should be removed after new tuple format
- public void setPageTupleFieldCount(int fieldCount);
+ // TODO: should be removed after new tuple format
+ public void setPageTupleFieldCount(int fieldCount);
- // for debugging
- public int getFreeSpaceOff();
+ // for debugging
+ public int getFreeSpaceOff();
- public void setFreeSpaceOff(int freeSpace);
+ public void setFreeSpaceOff(int freeSpace);
- public ITreeIndexTupleWriter getTupleWriter();
+ public ITreeIndexTupleWriter getTupleWriter();
- public int getPageHeaderSize();
+ public int getPageHeaderSize();
}
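For context on the revised ITreeIndexFrame update API above: a caller is expected to locate the old tuple's index, ask the frame whether the new tuple fits via hasSpaceUpdate(), and then tell update() whether the rewrite can happen in place. The sketch below is hypothetical (the class and method names are made up, the MultiComparator import path is assumed, and the actual B-tree operator logic is not shown in this section); it only illustrates the intended call pattern, including the SUFFICIENT_INPLACE_SPACE status added to FrameOpSpaceStatus later in this change.

```java
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;

public class FrameUpdateSketch {
    // Hypothetical helper: applies an update to a write-latched frame if it fits.
    // Returns false if the caller must fall back to a delete/split/insert path.
    public static boolean tryUpdate(ITreeIndexFrame frame, ITupleReference newTuple, int oldTupleIndex,
            MultiComparator cmp) throws Exception {
        FrameOpSpaceStatus status = frame.hasSpaceUpdate(newTuple, oldTupleIndex, cmp);
        switch (status) {
            case SUFFICIENT_INPLACE_SPACE:
                // The new tuple is no larger than the old one; overwrite it where it is.
                frame.update(newTuple, oldTupleIndex, true);
                return true;
            case SUFFICIENT_CONTIGUOUS_SPACE:
                // Write the new tuple into the contiguous free space and repoint the slot.
                frame.update(newTuple, oldTupleIndex, false);
                return true;
            case SUFFICIENT_SPACE:
                // Total free space suffices, but it is fragmented; reclaim it first.
                frame.compact(cmp);
                frame.update(newTuple, oldTupleIndex, false);
                return true;
            default:
                // INSUFFICIENT_SPACE: the page must be split.
                return false;
        }
    }
}
```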
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
index 9ec69d9..32c77be 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
@@ -3,5 +3,6 @@
import java.io.Serializable;
public interface ITreeIndexFrameFactory extends Serializable {
- public ITreeIndexFrame createFrame();
+ public ITreeIndexFrame createFrame();
+ public ITreeIndexTupleWriterFactory getTupleWriterFactory();
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
index 8b845ac..b989dd9 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleReference.java
@@ -20,11 +20,13 @@
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
public interface ITreeIndexTupleReference extends ITupleReference {
- public void setFieldCount(int fieldCount);
+ public void setFieldCount(int fieldCount);
- public void setFieldCount(int fieldStartIndex, int fieldCount);
+ public void setFieldCount(int fieldStartIndex, int fieldCount);
- public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset);
+ public void resetByTupleOffset(ByteBuffer buf, int tupleStartOffset);
- public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex);
+ public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex);
+
+ public int getTupleSize();
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
index 6cd12fb..f0bb7aa 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexTupleWriter.java
@@ -20,20 +20,21 @@
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
public interface ITreeIndexTupleWriter {
- public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf,
- int targetOff);
+ public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff);
+
+ public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff);
- public int bytesRequired(ITupleReference tuple);
+ public int bytesRequired(ITupleReference tuple);
- public int writeTupleFields(ITupleReference tuple, int startField,
- int numFields, ByteBuffer targetBuf, int targetOff);
+ // TODO: change to byte[] as well.
+ public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
+ int targetOff);
- public int bytesRequired(ITupleReference tuple, int startField,
- int numFields);
+ public int bytesRequired(ITupleReference tuple, int startField, int numFields);
- // return a tuplereference instance that can read the tuple written by this
- // writer
- // the main idea is that the format of the written tuple may not be the same
- // as the format written by this writer
- public ITreeIndexTupleReference createTupleReference();
+ // Returns a tuple reference instance that can read tuples written by this writer.
+ // This is needed because the on-page format produced by the writer may not be
+ // the same as the format of the tuple reference that was passed in to be
+ // written, so a matching reader is required.
+ public ITreeIndexTupleReference createTupleReference();
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
index f9fd77d..2064753 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
@@ -30,170 +30,131 @@
public abstract class TreeIndexOpHelper {
- protected ITreeIndexFrame interiorFrame;
- protected ITreeIndexFrame leafFrame;
- protected MultiComparator cmp;
+ protected ITreeIndexFrame interiorFrame;
+ protected ITreeIndexFrame leafFrame;
+ protected MultiComparator cmp;
- protected ITreeIndex treeIndex;
- protected int indexFileId = -1;
- protected int partition;
+ protected ITreeIndex treeIndex;
+ protected int indexFileId = -1;
+ protected int partition;
- protected ITreeIndexOperatorDescriptorHelper opDesc;
- protected IHyracksTaskContext ctx;
+ protected ITreeIndexOperatorDescriptorHelper opDesc;
+ protected IHyracksTaskContext ctx;
- protected IndexHelperOpenMode mode;
+ protected IndexHelperOpenMode mode;
- public TreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
- final IHyracksTaskContext ctx, int partition,
- IndexHelperOpenMode mode) {
- this.opDesc = opDesc;
- this.ctx = ctx;
- this.mode = mode;
- this.partition = partition;
- }
+ public TreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc, final IHyracksTaskContext ctx,
+ int partition, IndexHelperOpenMode mode) {
+ this.opDesc = opDesc;
+ this.ctx = ctx;
+ this.mode = mode;
+ this.partition = partition;
+ }
- public void init() throws HyracksDataException {
- IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(
- ctx);
- IFileMapProvider fileMapProvider = opDesc.getStorageManager()
- .getFileMapProvider(ctx);
- IFileSplitProvider fileSplitProvider = opDesc
- .getTreeIndexFileSplitProvider();
+ public void init() throws HyracksDataException {
+ IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+ IFileMapProvider fileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
+ IFileSplitProvider fileSplitProvider = opDesc.getTreeIndexFileSplitProvider();
- FileReference f = fileSplitProvider.getFileSplits()[partition]
- .getLocalFile();
- boolean fileIsMapped = fileMapProvider.isMapped(f);
+ FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
+ boolean fileIsMapped = fileMapProvider.isMapped(f);
+ if (!fileIsMapped) {
+ bufferCache.createFile(f);
+ }
+ int fileId = fileMapProvider.lookupFileId(f);
+ try {
+ bufferCache.openFile(fileId);
+ } catch (HyracksDataException e) {
+ // Revert state of buffer cache since file failed to open.
+ if (!fileIsMapped) {
+ bufferCache.deleteFile(fileId);
+ }
+ throw e;
+ }
- switch (mode) {
+ // Only set indexFileId member when openFile() succeeds,
+ // otherwise deinit() will try to close the file that failed to open
+ indexFileId = fileId;
- case OPEN: {
- if (!fileIsMapped) {
- throw new HyracksDataException(
- "Trying to open tree index from unmapped file "
- + f.toString());
- }
- }
- break;
+ interiorFrame = opDesc.getTreeIndexInteriorFactory().createFrame();
+ leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
- case CREATE:
- case ENLIST: {
- if (!fileIsMapped) {
- bufferCache.createFile(f);
- }
- }
- break;
+ IndexRegistry<ITreeIndex> treeIndexRegistry = opDesc.getTreeIndexRegistryProvider().getRegistry(ctx);
+ treeIndex = treeIndexRegistry.get(indexFileId);
+ if (treeIndex == null) {
+ // Create new tree and register it.
+ treeIndexRegistry.lock();
+ try {
+ // Check if tree has already been registered by another thread.
+ treeIndex = treeIndexRegistry.get(indexFileId);
+ if (treeIndex == null) {
+ // This thread should create and register the tree.
+ IBinaryComparator[] comparators = new IBinaryComparator[opDesc.getTreeIndexComparatorFactories().length];
+ for (int i = 0; i < opDesc.getTreeIndexComparatorFactories().length; i++) {
+ comparators[i] = opDesc.getTreeIndexComparatorFactories()[i].createBinaryComparator();
+ }
+ cmp = createMultiComparator(comparators);
+ treeIndex = createTreeIndex();
+ if (mode == IndexHelperOpenMode.CREATE) {
+ ITreeIndexMetaDataFrame metaFrame = treeIndex.getFreePageManager().getMetaDataFrameFactory()
+ .createFrame();
+ try {
+ treeIndex.create(indexFileId, leafFrame, metaFrame);
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ }
+ treeIndex.open(indexFileId);
+ treeIndexRegistry.register(indexFileId, treeIndex);
+ }
+ } finally {
+ treeIndexRegistry.unlock();
+ }
+ }
+ }
- }
+ // MUST be overridden
+ public ITreeIndex createTreeIndex() throws HyracksDataException {
+ throw new HyracksDataException("createTreeIndex Operation not implemented.");
+ }
- int fileId = fileMapProvider.lookupFileId(f);
- try {
- bufferCache.openFile(fileId);
- } catch (HyracksDataException e) {
- // revert state of buffer cache since file failed to open
- if (!fileIsMapped) {
- bufferCache.deleteFile(fileId);
- }
- throw e;
- }
+ // MUST be overridden
+ public MultiComparator createMultiComparator(IBinaryComparator[] comparators) throws HyracksDataException {
+ throw new HyracksDataException("createComparator Operation not implemented.");
+ }
- // only set indexFileId member when openFile() succeeds,
- // otherwise deinit() will try to close the file that failed to open
- indexFileId = fileId;
+ public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame) throws HyracksDataException {
+ return new TreeDiskOrderScanCursor(leafFrame);
+ }
- interiorFrame = opDesc.getTreeIndexInteriorFactory().createFrame();
- leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
+ public void deinit() throws HyracksDataException {
+ if (indexFileId != -1) {
+ IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
+ bufferCache.closeFile(indexFileId);
+ }
+ }
- IndexRegistry<ITreeIndex> treeIndexRegistry = opDesc
- .getTreeIndexRegistryProvider().getRegistry(ctx);
- treeIndex = treeIndexRegistry.get(indexFileId);
- if (treeIndex == null) {
+ public ITreeIndex getTreeIndex() {
+ return treeIndex;
+ }
- // create new tree and register it
- treeIndexRegistry.lock();
- try {
- // check if tree has already been registered by another thread
- treeIndex = treeIndexRegistry.get(indexFileId);
- if (treeIndex == null) {
- // this thread should create and register the tree
+ public IHyracksTaskContext getHyracksTaskContext() {
+ return ctx;
+ }
- IBinaryComparator[] comparators = new IBinaryComparator[opDesc
- .getTreeIndexComparatorFactories().length];
- for (int i = 0; i < opDesc
- .getTreeIndexComparatorFactories().length; i++) {
- comparators[i] = opDesc
- .getTreeIndexComparatorFactories()[i]
- .createBinaryComparator();
- }
+ public ITreeIndexOperatorDescriptorHelper getOperatorDescriptor() {
+ return opDesc;
+ }
- cmp = createMultiComparator(comparators);
+ public ITreeIndexFrame getLeafFrame() {
+ return leafFrame;
+ }
- treeIndex = createTreeIndex();
- if (mode == IndexHelperOpenMode.CREATE) {
- ITreeIndexMetaDataFrame metaFrame = treeIndex
- .getFreePageManager().getMetaDataFrameFactory()
- .createFrame();
- try {
- treeIndex.create(indexFileId, leafFrame, metaFrame);
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
- }
- treeIndex.open(indexFileId);
- treeIndexRegistry.register(indexFileId, treeIndex);
- }
- } finally {
- treeIndexRegistry.unlock();
- }
- }
- }
+ public ITreeIndexFrame getInteriorFrame() {
+ return interiorFrame;
+ }
- // MUST be overridden
- public ITreeIndex createTreeIndex() throws HyracksDataException {
- throw new HyracksDataException(
- "createTreeIndex Operation not implemented.");
- }
-
- // MUST be overridden
- public MultiComparator createMultiComparator(IBinaryComparator[] comparators)
- throws HyracksDataException {
- throw new HyracksDataException(
- "createComparator Operation not implemented.");
- }
-
- public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame)
- throws HyracksDataException {
- return new TreeDiskOrderScanCursor(leafFrame);
- }
-
- public void deinit() throws HyracksDataException {
- if (indexFileId != -1) {
- IBufferCache bufferCache = opDesc.getStorageManager()
- .getBufferCache(ctx);
- bufferCache.closeFile(indexFileId);
- }
- }
-
- public ITreeIndex getTreeIndex() {
- return treeIndex;
- }
-
- public IHyracksTaskContext getHyracksTaskContext() {
- return ctx;
- }
-
- public ITreeIndexOperatorDescriptorHelper getOperatorDescriptor() {
- return opDesc;
- }
-
- public ITreeIndexFrame getLeafFrame() {
- return leafFrame;
- }
-
- public ITreeIndexFrame getInteriorFrame() {
- return interiorFrame;
- }
-
- public int getIndexFileId() {
- return indexFileId;
- }
+ public int getIndexFileId() {
+ return indexFileId;
+ }
}
\ No newline at end of file
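The rewritten init() above folds the old CREATE/ENLIST/OPEN handling into a single path and keeps the lock-then-recheck pattern around the IndexRegistry, so only one thread creates and registers the tree for a given file id. The snippet below is a self-contained analogue of that pattern using only standard-library types; LazyRegistry and getOrCreate are illustrative names, not part of this codebase.

```java
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.locks.ReentrantLock;

// Minimal analogue of the registry handling in TreeIndexOpHelper.init():
// optimistic lookup, then lock, re-check, and create/register only if still absent.
public class LazyRegistry<T> {
    private final ConcurrentHashMap<Integer, T> entries = new ConcurrentHashMap<Integer, T>();
    private final ReentrantLock lock = new ReentrantLock();

    public T getOrCreate(int id, Callable<T> factory) throws Exception {
        T existing = entries.get(id); // lock-free fast path
        if (existing != null) {
            return existing;
        }
        lock.lock();
        try {
            // Another thread may have registered the entry while we waited for the lock.
            existing = entries.get(id);
            if (existing != null) {
                return existing;
            }
            T created = factory.call(); // e.g. create, open and register the tree index
            entries.put(id, created);
            return created;
        } finally {
            lock.unlock();
        }
    }
}
```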
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
index 7114875..da9c815 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/FrameOpSpaceStatus.java
@@ -16,5 +16,5 @@
package edu.uci.ics.hyracks.storage.am.common.frames;
public enum FrameOpSpaceStatus {
- INSUFFICIENT_SPACE, SUFFICIENT_CONTIGUOUS_SPACE, SUFFICIENT_SPACE
+ INSUFFICIENT_SPACE, SUFFICIENT_CONTIGUOUS_SPACE, SUFFICIENT_SPACE, SUFFICIENT_INPLACE_SPACE
}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
index e5c37ff..82309a4 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
@@ -38,309 +38,322 @@
public abstract class TreeIndexNSMFrame implements ITreeIndexFrame {
- protected static final int pageLsnOff = 0; // 0
- protected static final int tupleCountOff = pageLsnOff + 4; // 4
- protected static final int freeSpaceOff = tupleCountOff + 4; // 8
- protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 16
- protected static final byte levelOff = totalFreeSpaceOff + 4;
- protected static final byte smFlagOff = levelOff + 1;
+ protected static final int pageLsnOff = 0; // 0
+ protected static final int tupleCountOff = pageLsnOff + 4; // 4
+ protected static final int freeSpaceOff = tupleCountOff + 4; // 8
+ protected static final int totalFreeSpaceOff = freeSpaceOff + 4; // 12
+ protected static final byte levelOff = totalFreeSpaceOff + 4;
+ protected static final byte smFlagOff = levelOff + 1;
- protected ICachedPage page = null;
- protected ByteBuffer buf = null;
- protected ISlotManager slotManager;
+ protected ICachedPage page = null;
+ protected ByteBuffer buf = null;
+ protected ISlotManager slotManager;
- protected ITreeIndexTupleWriter tupleWriter;
- protected ITreeIndexTupleReference frameTuple;
+ protected ITreeIndexTupleWriter tupleWriter;
+ protected ITreeIndexTupleReference frameTuple;
- public TreeIndexNSMFrame(ITreeIndexTupleWriter tupleWriter,
- ISlotManager slotManager) {
- this.tupleWriter = tupleWriter;
- this.frameTuple = tupleWriter.createTupleReference();
- this.slotManager = slotManager;
- }
+ public TreeIndexNSMFrame(ITreeIndexTupleWriter tupleWriter, ISlotManager slotManager) {
+ this.tupleWriter = tupleWriter;
+ this.frameTuple = tupleWriter.createTupleReference();
+ this.slotManager = slotManager;
+ }
- @Override
- public void initBuffer(byte level) {
- buf.putInt(pageLsnOff, 0); // TODO: might to set to a different lsn
- // during creation
- buf.putInt(tupleCountOff, 0);
- resetSpaceParams();
- buf.put(levelOff, level);
- buf.put(smFlagOff, (byte) 0);
- }
+ @Override
+ public void initBuffer(byte level) {
+ buf.putInt(pageLsnOff, 0); // TODO: might need to be set to a different
+ // lsn during creation
+ buf.putInt(tupleCountOff, 0);
+ resetSpaceParams();
+ buf.put(levelOff, level);
+ buf.put(smFlagOff, (byte) 0);
+ }
- @Override
- public boolean isLeaf() {
- return buf.get(levelOff) == 0;
- }
+ @Override
+ public boolean isLeaf() {
+ return buf.get(levelOff) == 0;
+ }
- @Override
- public boolean isInterior() {
- return buf.get(levelOff) > 0;
- }
+ @Override
+ public boolean isInterior() {
+ return buf.get(levelOff) > 0;
+ }
- @Override
- public byte getLevel() {
- return buf.get(levelOff);
- }
+ @Override
+ public byte getLevel() {
+ return buf.get(levelOff);
+ }
- @Override
- public void setLevel(byte level) {
- buf.put(levelOff, level);
- }
+ @Override
+ public void setLevel(byte level) {
+ buf.put(levelOff, level);
+ }
- @Override
- public boolean getSmFlag() {
- return buf.get(smFlagOff) != 0;
- }
+ @Override
+ public boolean getSmFlag() {
+ return buf.get(smFlagOff) != 0;
+ }
- @Override
- public void setSmFlag(boolean smFlag) {
- if (smFlag)
- buf.put(smFlagOff, (byte) 1);
- else
- buf.put(smFlagOff, (byte) 0);
- }
+ @Override
+ public void setSmFlag(boolean smFlag) {
+ if (smFlag)
+ buf.put(smFlagOff, (byte) 1);
+ else
+ buf.put(smFlagOff, (byte) 0);
+ }
- @Override
- public int getFreeSpaceOff() {
- return buf.getInt(freeSpaceOff);
- }
+ @Override
+ public int getFreeSpaceOff() {
+ return buf.getInt(freeSpaceOff);
+ }
- @Override
- public void setFreeSpaceOff(int freeSpace) {
- buf.putInt(freeSpaceOff, freeSpace);
- }
+ @Override
+ public void setFreeSpaceOff(int freeSpace) {
+ buf.putInt(freeSpaceOff, freeSpace);
+ }
- @Override
- public void setPage(ICachedPage page) {
- this.page = page;
- this.buf = page.getBuffer();
- slotManager.setFrame(this);
- }
+ @Override
+ public void setPage(ICachedPage page) {
+ this.page = page;
+ this.buf = page.getBuffer();
+ slotManager.setFrame(this);
+ }
- @Override
- public ByteBuffer getBuffer() {
- return page.getBuffer();
- }
+ @Override
+ public ByteBuffer getBuffer() {
+ return page.getBuffer();
+ }
- @Override
- public ICachedPage getPage() {
- return page;
- }
+ @Override
+ public ICachedPage getPage() {
+ return page;
+ }
- @Override
- public boolean compact(MultiComparator cmp) {
- resetSpaceParams();
- frameTuple.setFieldCount(cmp.getFieldCount());
+ @Override
+ public boolean compact(MultiComparator cmp) {
+ resetSpaceParams();
+ frameTuple.setFieldCount(cmp.getFieldCount());
- int tupleCount = buf.getInt(tupleCountOff);
- int freeSpace = buf.getInt(freeSpaceOff);
+ int tupleCount = buf.getInt(tupleCountOff);
+ int freeSpace = buf.getInt(freeSpaceOff);
- ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
- sortedTupleOffs.ensureCapacity(tupleCount);
- for (int i = 0; i < tupleCount; i++) {
- int slotOff = slotManager.getSlotOff(i);
- int tupleOff = slotManager.getTupleOff(slotOff);
- sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
- }
- Collections.sort(sortedTupleOffs);
+ ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
+ sortedTupleOffs.ensureCapacity(tupleCount);
+ for (int i = 0; i < tupleCount; i++) {
+ int slotOff = slotManager.getSlotOff(i);
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
+ }
+ Collections.sort(sortedTupleOffs);
- for (int i = 0; i < sortedTupleOffs.size(); i++) {
- int tupleOff = sortedTupleOffs.get(i).tupleOff;
- frameTuple.resetByTupleOffset(buf, tupleOff);
+ for (int i = 0; i < sortedTupleOffs.size(); i++) {
+ int tupleOff = sortedTupleOffs.get(i).tupleOff;
+ frameTuple.resetByTupleOffset(buf, tupleOff);
- int tupleEndOff = frameTuple.getFieldStart(frameTuple
- .getFieldCount() - 1)
- + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
- int tupleLength = tupleEndOff - tupleOff;
- System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace,
- tupleLength);
+ int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
+ int tupleLength = tupleEndOff - tupleOff;
+ System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
- slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
- freeSpace += tupleLength;
- }
+ slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
+ freeSpace += tupleLength;
+ }
- buf.putInt(freeSpaceOff, freeSpace);
- buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount
- * slotManager.getSlotSize());
+ buf.putInt(freeSpaceOff, freeSpace);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
- return false;
- }
+ return false;
+ }
- @Override
- public void delete(ITupleReference tuple, MultiComparator cmp,
- boolean exactDelete) throws Exception {
+ @Override
+ public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
- frameTuple.setFieldCount(cmp.getFieldCount());
- int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp,
- FindTupleMode.FTM_EXACT,
- FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- int slotOff = slotManager.getSlotOff(tupleIndex);
- if (tupleIndex < 0) {
- throw new TreeIndexException("Key to be deleted does not exist.");
- } else {
- if (exactDelete) {
- // check the non-key columns for equality by byte-by-byte
- // comparison
- int tupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByTupleOffset(buf, tupleOff);
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ int slotOff = slotManager.getSlotOff(tupleIndex);
+ if (tupleIndex < 0) {
+ throw new TreeIndexException("Key to be deleted does not exist.");
+ } else {
+ if (exactDelete) {
+ // check the non-key columns for equality by byte-by-byte
+ // comparison
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
- int comparison = cmp.fieldRangeCompare(tuple, frameTuple,
- cmp.getKeyFieldCount() - 1,
- cmp.getFieldCount() - cmp.getKeyFieldCount());
- if (comparison != 0) {
- throw new TreeIndexException(
- "Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
- }
- }
+ int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount() - 1,
+ cmp.getFieldCount() - cmp.getKeyFieldCount());
+ if (comparison != 0) {
+ throw new TreeIndexException(
+ "Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
+ }
+ }
- int tupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByTupleOffset(buf, tupleOff);
- int tupleSize = tupleWriter.bytesRequired(frameTuple);
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+ int tupleSize = tupleWriter.bytesRequired(frameTuple);
- // perform deletion (we just do a memcpy to overwrite the slot)
- int slotStartOff = slotManager.getSlotEndOff();
- int length = slotOff - slotStartOff;
- System.arraycopy(buf.array(), slotStartOff, buf.array(),
- slotStartOff + slotManager.getSlotSize(), length);
+ // perform deletion (we just do a memcpy to overwrite the slot)
+ int slotStartOff = slotManager.getSlotEndOff();
+ int length = slotOff - slotStartOff;
+ System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
- // maintain space information
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
- + tupleSize + slotManager.getSlotSize());
- }
- }
+ // maintain space information
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
+ }
+ }
- @Override
- public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple,
- MultiComparator cmp) {
- int bytesRequired = tupleWriter.bytesRequired(tuple);
- if (bytesRequired + slotManager.getSlotSize() <= buf.capacity()
- - buf.getInt(freeSpaceOff)
- - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
- return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
- else if (bytesRequired + slotManager.getSlotSize() <= buf
- .getInt(totalFreeSpaceOff))
- return FrameOpSpaceStatus.SUFFICIENT_SPACE;
- else
- return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
- }
+ @Override
+ public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
+ int bytesRequired = tupleWriter.bytesRequired(tuple);
+ // Enough space in the contiguous space region?
+ if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
+ - (buf.getInt(tupleCountOff) * slotManager.getSlotSize())) {
+ return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ }
+ // Enough space after compaction?
+ if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff)) {
+ return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+ }
+ return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+ }
- @Override
- public FrameOpSpaceStatus hasSpaceUpdate(int rid, ITupleReference tuple,
- MultiComparator cmp) {
- // TODO Auto-generated method stub
- return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
- }
+ @Override
+ public FrameOpSpaceStatus hasSpaceUpdate(ITupleReference newTuple, int oldTupleIndex, MultiComparator cmp) {
+ frameTuple.resetByTupleIndex(this, oldTupleIndex);
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ int oldTupleBytes = frameTuple.getTupleSize();
+ int newTupleBytes = tupleWriter.bytesRequired(newTuple);
+ int additionalBytesRequired = newTupleBytes - oldTupleBytes;
+ // Enough space for an in-place update?
+ if (additionalBytesRequired <= 0) {
+ return FrameOpSpaceStatus.SUFFICIENT_INPLACE_SPACE;
+ }
+ // Enough space if we delete the old tuple and insert the new one without compaction?
+ if (newTupleBytes <= buf.capacity() - buf.getInt(freeSpaceOff)
+ - (buf.getInt(tupleCountOff) * slotManager.getSlotSize())) {
+ return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ }
+ // Enough space if we delete the old tuple and compact?
+ if (additionalBytesRequired <= buf.getInt(totalFreeSpaceOff)) {
+ return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+ }
+ return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+ }
- protected void resetSpaceParams() {
- buf.putInt(freeSpaceOff, smFlagOff + 1);
- buf.putInt(totalFreeSpaceOff, buf.capacity() - (smFlagOff + 1));
- }
+ protected void resetSpaceParams() {
+ buf.putInt(freeSpaceOff, smFlagOff + 1);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - (smFlagOff + 1));
+ }
- @Override
- public int findTupleIndex(ITupleReference tuple, MultiComparator cmp)
- throws Exception {
- frameTuple.setFieldCount(cmp.getFieldCount());
- return slotManager.findTupleIndex(tuple, frameTuple, cmp,
- FindTupleMode.FTM_INCLUSIVE,
- FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
- }
+ @Override
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp, boolean throwIfKeyExists) throws Exception {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ }
- @Override
- public void insert(ITupleReference tuple, MultiComparator cmp,
- int tupleIndex) throws Exception {
- slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
- int bytesWritten = tupleWriter.writeTuple(tuple, buf,
- buf.getInt(freeSpaceOff));
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
- - bytesWritten - slotManager.getSlotSize());
- }
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
+ slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
+ int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), buf.getInt(freeSpaceOff));
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+ }
- @Override
- public void update(int rid, ITupleReference tuple) throws Exception {
- // TODO Auto-generated method stub
+ @Override
+ public void update(ITupleReference newTuple, int oldTupleIndex, boolean inPlace) throws Exception {
+ frameTuple.resetByTupleIndex(this, oldTupleIndex);
+ int oldTupleBytes = frameTuple.getTupleSize();
+ int slotOff = slotManager.getSlotOff(oldTupleIndex);
+ int bytesWritten = 0;
+ if (inPlace) {
+ // Overwrite the old tuple in place.
+ bytesWritten = tupleWriter.writeTuple(newTuple, buf.array(), buf.getInt(slotOff));
+ } else {
+ // Write the new tuple at the start of the contiguous free space and repoint the slot to it (the old tuple's bytes are reclaimed on the next compaction).
+ int newTupleOff = buf.getInt(freeSpaceOff);
+ bytesWritten = tupleWriter.writeTuple(newTuple, buf.array(), newTupleOff);
+ // Update slot value.
+ buf.putInt(slotOff, newTupleOff);
+ // Update contiguous free space pointer.
+ buf.putInt(freeSpaceOff, newTupleOff + bytesWritten);
+ }
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + oldTupleBytes - bytesWritten);
+ }
- }
+ @Override
+ public void printHeader() {
+ // TODO Auto-generated method stub
- @Override
- public void printHeader() {
- // TODO Auto-generated method stub
+ }
- }
+ @Override
+ public int getTupleCount() {
+ return buf.getInt(tupleCountOff);
+ }
- @Override
- public int getTupleCount() {
- return buf.getInt(tupleCountOff);
- }
+ public ISlotManager getSlotManager() {
+ return slotManager;
+ }
- public ISlotManager getSlotManager() {
- return slotManager;
- }
+ @Override
+ public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
+ StringBuilder strBuilder = new StringBuilder();
+ int tupleCount = buf.getInt(tupleCountOff);
+ frameTuple.setFieldCount(fields.length);
+ for (int i = 0; i < tupleCount; i++) {
+ frameTuple.resetByTupleIndex(this, i);
+ for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j), frameTuple.getFieldLength(j));
+ DataInput dataIn = new DataInputStream(inStream);
+ Object o = fields[j].deserialize(dataIn);
+ strBuilder.append(o.toString() + " ");
+ }
+ strBuilder.append(" | ");
+ }
+ strBuilder.append("\n");
+ return strBuilder.toString();
+ }
- @Override
- public String printKeys(MultiComparator cmp,
- ISerializerDeserializer[] fields) throws HyracksDataException {
- StringBuilder strBuilder = new StringBuilder();
- int tupleCount = buf.getInt(tupleCountOff);
- frameTuple.setFieldCount(fields.length);
- for (int i = 0; i < tupleCount; i++) {
- frameTuple.resetByTupleIndex(this, i);
- for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(
- frameTuple.getFieldData(j),
- frameTuple.getFieldStart(j),
- frameTuple.getFieldLength(j));
- DataInput dataIn = new DataInputStream(inStream);
- Object o = fields[j].deserialize(dataIn);
- strBuilder.append(o.toString() + " ");
- }
- strBuilder.append(" | ");
- }
- strBuilder.append("\n");
- return strBuilder.toString();
- }
+ @Override
+ public int getTupleOffset(int slotNum) {
+ return slotManager.getTupleOff(slotManager.getSlotStartOff() - slotNum * slotManager.getSlotSize());
+ }
- @Override
- public int getTupleOffset(int slotNum) {
- return slotManager.getTupleOff(slotManager.getSlotStartOff() - slotNum
- * slotManager.getSlotSize());
- }
+ @Override
+ public int getPageLsn() {
+ return buf.getInt(pageLsnOff);
+ }
- @Override
- public int getPageLsn() {
- return buf.getInt(pageLsnOff);
- }
+ @Override
+ public void setPageLsn(int pageLsn) {
+ buf.putInt(pageLsnOff, pageLsn);
+ }
- @Override
- public void setPageLsn(int pageLsn) {
- buf.putInt(pageLsnOff, pageLsn);
- }
+ @Override
+ public int getTotalFreeSpace() {
+ return buf.getInt(totalFreeSpaceOff);
+ }
- @Override
- public int getTotalFreeSpace() {
- return buf.getInt(totalFreeSpaceOff);
- }
+ @Override
+ public boolean compress(MultiComparator cmp) {
+ return false;
+ }
- @Override
- public boolean compress(MultiComparator cmp) {
- return false;
- }
+ @Override
+ public int getSlotSize() {
+ return slotManager.getSlotSize();
+ }
- @Override
- public int getSlotSize() {
- return slotManager.getSlotSize();
- }
+ @Override
+ public void setPageTupleFieldCount(int fieldCount) {
+ frameTuple.setFieldCount(fieldCount);
+ }
- @Override
- public void setPageTupleFieldCount(int fieldCount) {
- frameTuple.setFieldCount(fieldCount);
- }
-
- public ITreeIndexTupleWriter getTupleWriter() {
- return tupleWriter;
- }
+ public ITreeIndexTupleWriter getTupleWriter() {
+ return tupleWriter;
+ }
}
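The space checks in hasSpaceInsert() and hasSpaceUpdate() above reduce to arithmetic over the frame header fields: contiguous free space is the page capacity minus the free-space offset minus the slot area, while totalFreeSpaceOff additionally counts holes left by deletes and out-of-place updates. The standalone snippet below walks through that arithmetic; the page size, offsets and tuple sizes are all made up for illustration.

```java
// Illustrative numbers for the free-space bookkeeping in TreeIndexNSMFrame.
public class FrameSpaceMath {
    public static void main(String[] args) {
        int pageSize = 256;        // buf.capacity()
        int freeSpaceOff = 150;    // start of the contiguous free region
        int tupleCount = 5;
        int slotSize = 4;
        int totalFreeSpace = 60;   // contiguous space plus holes from earlier deletes/updates

        int oldTupleBytes = 20;
        int newTupleBytes = 32;
        int additional = newTupleBytes - oldTupleBytes;                   // 12
        int contiguous = pageSize - freeSpaceOff - tupleCount * slotSize; // 86

        if (additional <= 0) {
            System.out.println("SUFFICIENT_INPLACE_SPACE");
        } else if (newTupleBytes <= contiguous) {
            System.out.println("SUFFICIENT_CONTIGUOUS_SPACE"); // taken here: 32 <= 86
        } else if (additional <= totalFreeSpace) {
            System.out.println("SUFFICIENT_SPACE");
        } else {
            System.out.println("INSUFFICIENT_SPACE");
        }

        // After an out-of-place update(), totalFreeSpace changes by oldTupleBytes - bytesWritten
        // (here it shrinks by 12 because the new tuple is larger).
        System.out.println("totalFreeSpace afterwards: " + (totalFreeSpace + oldTupleBytes - newTupleBytes)); // 48
    }
}
```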
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
index 353bd95..a470d04 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleReference.java
@@ -22,77 +22,78 @@
public class SimpleTupleReference implements ITreeIndexTupleReference {
- protected ByteBuffer buf;
- protected int fieldStartIndex;
- protected int fieldCount;
- protected int tupleStartOff;
- protected int nullFlagsBytes;
- protected int fieldSlotsBytes;
+ protected ByteBuffer buf;
+ protected int fieldStartIndex;
+ protected int fieldCount;
+ protected int tupleStartOff;
+ protected int nullFlagsBytes;
+ protected int fieldSlotsBytes;
+
+ @Override
+ public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
+ this.buf = buf;
+ this.tupleStartOff = tupleStartOff;
+ }
+
+ @Override
+ public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+ resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
+ }
+
+ @Override
+ public void setFieldCount(int fieldCount) {
+ this.fieldCount = fieldCount;
+ nullFlagsBytes = getNullFlagsBytes();
+ fieldSlotsBytes = getFieldSlotsBytes();
+ fieldStartIndex = 0;
+ }
+
+ @Override
+ public void setFieldCount(int fieldStartIndex, int fieldCount) {
+ this.fieldCount = fieldCount;
+ this.fieldStartIndex = fieldStartIndex;
+ }
+
+ @Override
+ public int getFieldCount() {
+ return fieldCount;
+ }
+
+ @Override
+ public byte[] getFieldData(int fIdx) {
+ return buf.array();
+ }
+
+ @Override
+ public int getFieldLength(int fIdx) {
+ if (fIdx == 0) {
+ return buf.getShort(tupleStartOff + nullFlagsBytes);
+ } else {
+ return buf.getShort(tupleStartOff + nullFlagsBytes + fIdx * 2)
+ - buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 2));
+ }
+ }
+
+ @Override
+ public int getFieldStart(int fIdx) {
+ if (fIdx == 0) {
+ return tupleStartOff + nullFlagsBytes + fieldSlotsBytes;
+ } else {
+ return tupleStartOff + nullFlagsBytes + fieldSlotsBytes
+ + buf.getShort(tupleStartOff + nullFlagsBytes + ((fIdx - 1) * 2));
+ }
+ }
+
+ protected int getNullFlagsBytes() {
+ return (int) Math.ceil(fieldCount / 8.0);
+ }
+
+ protected int getFieldSlotsBytes() {
+ return fieldCount * 2;
+ }
@Override
- public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
- this.buf = buf;
- this.tupleStartOff = tupleStartOff;
- }
-
- @Override
- public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
- resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
- }
-
- @Override
- public void setFieldCount(int fieldCount) {
- this.fieldCount = fieldCount;
- nullFlagsBytes = getNullFlagsBytes();
- fieldSlotsBytes = getFieldSlotsBytes();
- fieldStartIndex = 0;
- }
-
- @Override
- public void setFieldCount(int fieldStartIndex, int fieldCount) {
- this.fieldCount = fieldCount;
- this.fieldStartIndex = fieldStartIndex;
- }
-
- @Override
- public int getFieldCount() {
- return fieldCount;
- }
-
- @Override
- public byte[] getFieldData(int fIdx) {
- return buf.array();
- }
-
- @Override
- public int getFieldLength(int fIdx) {
- if (fIdx == 0) {
- return buf.getShort(tupleStartOff + nullFlagsBytes);
- } else {
- return buf.getShort(tupleStartOff + nullFlagsBytes + fIdx * 2)
- - buf.getShort(tupleStartOff + nullFlagsBytes
- + ((fIdx - 1) * 2));
- }
- }
-
- @Override
- public int getFieldStart(int fIdx) {
- if (fIdx == 0) {
- return tupleStartOff + nullFlagsBytes + fieldSlotsBytes;
- } else {
- return tupleStartOff
- + nullFlagsBytes
- + fieldSlotsBytes
- + buf.getShort(tupleStartOff + nullFlagsBytes
- + ((fIdx - 1) * 2));
- }
- }
-
- protected int getNullFlagsBytes() {
- return (int) Math.ceil(fieldCount / 8.0);
- }
-
- protected int getFieldSlotsBytes() {
- return fieldCount * 2;
+ public int getTupleSize() {
+ return nullFlagsBytes + fieldSlotsBytes + buf.getShort(tupleStartOff + nullFlagsBytes + (fieldCount - 1) * 2);
}
}
\ No newline at end of file
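getTupleSize() above relies on the simple tuple layout: null-flag bytes, then one 2-byte slot per field holding the cumulative end offset of that field's data (relative to the start of the data region), then the field data itself. A worked example with assumed field lengths, computed with plain arithmetic rather than the class itself:

```java
// Worked example of SimpleTupleReference.getTupleSize() for an assumed 3-field tuple
// with field lengths of 4, 8 and 2 bytes (lengths chosen for illustration).
public class SimpleTupleSizeExample {
    public static void main(String[] args) {
        int fieldCount = 3;
        int[] fieldLengths = { 4, 8, 2 };

        int nullFlagsBytes = (int) Math.ceil(fieldCount / 8.0); // 1
        int fieldSlotsBytes = fieldCount * 2;                   // 6, one short per field

        // The field slots store cumulative end offsets: 4, 12, 14.
        int lastFieldEnd = 0;
        for (int len : fieldLengths) {
            lastFieldEnd += len;
        }

        // getTupleSize() = nullFlagsBytes + fieldSlotsBytes + value of the last slot.
        int tupleSize = nullFlagsBytes + fieldSlotsBytes + lastFieldEnd;
        System.out.println(tupleSize); // 1 + 6 + 14 = 21
    }
}
```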
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
index 11f7820..831247e 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
@@ -23,95 +23,103 @@
public class SimpleTupleWriter implements ITreeIndexTupleWriter {
- @Override
- public int bytesRequired(ITupleReference tuple) {
- int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
- for (int i = 0; i < tuple.getFieldCount(); i++) {
- bytes += tuple.getFieldLength(i);
- }
- return bytes;
+ // Write short in big endian (high byte first) to target byte array at given offset; matches ByteBuffer.getShort().
+ private static void writeShortL(short s, byte[] buf, int targetOff) {
+ buf[targetOff] = (byte) (s >> 8);
+ buf[targetOff + 1] = (byte) (s >> 0);
+ }
+
+ // Write short in little endian (low byte first) to target byte array at given offset.
+ private static void writeShortB(short s, byte[] buf, int targetOff) {
+ buf[targetOff] = (byte) (s >> 0);
+ buf[targetOff + 1] = (byte) (s >> 8);
+ }
+
+ @Override
+ public int bytesRequired(ITupleReference tuple) {
+ int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ bytes += tuple.getFieldLength(i);
+ }
+ return bytes;
+ }
+
+ @Override
+ public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
+ int bytes = getNullFlagsBytes(tuple, startField, numFields) + getFieldSlotsBytes(tuple, startField, numFields);
+ for (int i = startField; i < startField + numFields; i++) {
+ bytes += tuple.getFieldLength(i);
+ }
+ return bytes;
+ }
+
+ @Override
+ public ITreeIndexTupleReference createTupleReference() {
+ return new SimpleTupleReference();
+ }
+
+ @Override
+ public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
+ return writeTuple(tuple, targetBuf.array(), targetOff);
+ }
+
+ @Override
+ public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
+ int runner = targetOff;
+ int nullFlagsBytes = getNullFlagsBytes(tuple);
+ int fieldSlotsBytes = getFieldSlotsBytes(tuple);
+ for (int i = 0; i < nullFlagsBytes; i++) {
+ targetBuf[runner++] = (byte) 0;
+ }
+ runner += fieldSlotsBytes;
+ int fieldEndOff = 0;
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf, runner,
+ tuple.getFieldLength(i));
+ fieldEndOff += tuple.getFieldLength(i);
+ runner += tuple.getFieldLength(i);
+ writeShortL((short) fieldEndOff, targetBuf, targetOff + nullFlagsBytes + i * 2);
+ }
+ return runner - targetOff;
}
- @Override
- public int bytesRequired(ITupleReference tuple, int startField,
- int numFields) {
- int bytes = getNullFlagsBytes(tuple, startField, numFields)
- + getFieldSlotsBytes(tuple, startField, numFields);
- for (int i = startField; i < startField + numFields; i++) {
- bytes += tuple.getFieldLength(i);
- }
- return bytes;
- }
+ @Override
+ public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
+ int targetOff) {
+ int runner = targetOff;
+ int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
+ for (int i = 0; i < nullFlagsBytes; i++) {
+ targetBuf.put(runner++, (byte) 0);
+ }
+ runner += getFieldSlotsBytes(tuple, startField, numFields);
- @Override
- public ITreeIndexTupleReference createTupleReference() {
- return new SimpleTupleReference();
- }
+ int fieldEndOff = 0;
+ int fieldCounter = 0;
+ for (int i = startField; i < startField + numFields; i++) {
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+ tuple.getFieldLength(i));
+ fieldEndOff += tuple.getFieldLength(i);
+ runner += tuple.getFieldLength(i);
+ targetBuf.putShort(targetOff + nullFlagsBytes + fieldCounter * 2, (short) fieldEndOff);
+ fieldCounter++;
+ }
- @Override
- public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf,
- int targetOff) {
- int runner = targetOff;
- int nullFlagsBytes = getNullFlagsBytes(tuple);
- int fieldSlotsBytes = getFieldSlotsBytes(tuple);
- for (int i = 0; i < nullFlagsBytes; i++) {
- targetBuf.put(runner++, (byte) 0);
- }
- runner += fieldSlotsBytes;
+ return runner - targetOff;
+ }
- int fieldEndOff = 0;
- for (int i = 0; i < tuple.getFieldCount(); i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
- targetBuf.array(), runner, tuple.getFieldLength(i));
- fieldEndOff += tuple.getFieldLength(i);
- runner += tuple.getFieldLength(i);
- targetBuf.putShort(targetOff + nullFlagsBytes + i * 2,
- (short) fieldEndOff);
- }
+ protected int getNullFlagsBytes(ITupleReference tuple) {
+ return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
+ }
- return runner - targetOff;
- }
+ protected int getFieldSlotsBytes(ITupleReference tuple) {
+ return tuple.getFieldCount() * 2;
+ }
- @Override
- public int writeTupleFields(ITupleReference tuple, int startField,
- int numFields, ByteBuffer targetBuf, int targetOff) {
- int runner = targetOff;
- int nullFlagsBytes = getNullFlagsBytes(tuple, startField, numFields);
- for (int i = 0; i < nullFlagsBytes; i++) {
- targetBuf.put(runner++, (byte) 0);
- }
- runner += getFieldSlotsBytes(tuple, startField, numFields);
+ protected int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
+ return (int) Math.ceil((double) numFields / 8.0);
+ }
- int fieldEndOff = 0;
- int fieldCounter = 0;
- for (int i = startField; i < startField + numFields; i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
- targetBuf.array(), runner, tuple.getFieldLength(i));
- fieldEndOff += tuple.getFieldLength(i);
- runner += tuple.getFieldLength(i);
- targetBuf.putShort(targetOff + nullFlagsBytes + fieldCounter * 2,
- (short) fieldEndOff);
- fieldCounter++;
- }
-
- return runner - targetOff;
- }
-
- protected int getNullFlagsBytes(ITupleReference tuple) {
- return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
- }
-
- protected int getFieldSlotsBytes(ITupleReference tuple) {
- return tuple.getFieldCount() * 2;
- }
-
- protected int getNullFlagsBytes(ITupleReference tuple, int startField,
- int numFields) {
- return (int) Math.ceil((double) numFields / 8.0);
- }
-
- protected int getFieldSlotsBytes(ITupleReference tuple, int startField,
- int numFields) {
- return numFields * 2;
- }
+ protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
+ return numFields * 2;
+ }
}
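The field-slot shorts written by writeTuple(byte[], ...) above must be readable by SimpleTupleReference through ByteBuffer.getShort(), which defaults to big-endian order. The standalone check below (not code from this patch) confirms that writing the high byte first, as writeShortL() does, round-trips through that default order.

```java
import java.nio.ByteBuffer;

// Standalone byte-order check: writing the high byte first reproduces what
// ByteBuffer.getShort() (big endian by default) reads back.
public class ShortByteOrderCheck {
    public static void main(String[] args) {
        short value = 0x1234;
        byte[] buf = new byte[2];
        buf[0] = (byte) (value >> 8); // high byte first, as in writeShortL()
        buf[1] = (byte) (value >> 0);
        short readBack = ByteBuffer.wrap(buf).getShort(0);
        System.out.println(readBack == value); // true
    }
}
```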
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
index 31b32e6..72a36a1 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleReference.java
@@ -22,99 +22,105 @@
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
public class TypeAwareTupleReference implements ITreeIndexTupleReference {
- protected ByteBuffer buf;
- protected int fieldStartIndex;
- protected int fieldCount;
- protected int tupleStartOff;
- protected int nullFlagsBytes;
- protected int dataStartOff;
+ protected ByteBuffer buf;
+ protected int fieldStartIndex;
+ protected int fieldCount;
+ protected int tupleStartOff;
+ protected int nullFlagsBytes;
+ protected int dataStartOff;
- protected ITypeTrait[] typeTraits;
- protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
- protected int[] decodedFieldSlots;
+ protected ITypeTrait[] typeTraits;
+ protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
+ protected int[] decodedFieldSlots;
- public TypeAwareTupleReference(ITypeTrait[] typeTraits) {
- this.typeTraits = typeTraits;
- this.fieldStartIndex = 0;
- }
+ public TypeAwareTupleReference(ITypeTrait[] typeTraits) {
+ this.typeTraits = typeTraits;
+ this.fieldStartIndex = 0;
+ setFieldCount(typeTraits.length);
+ }
+
+ @Override
+ public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
+ this.buf = buf;
+ this.tupleStartOff = tupleStartOff;
+
+ // decode field slots
+ int field = 0;
+ int cumul = 0;
+ int end = fieldStartIndex + fieldCount;
+ encDec.reset(buf.array(), tupleStartOff + nullFlagsBytes);
+ for (int i = fieldStartIndex; i < end; i++) {
+ int staticDataLen = typeTraits[i].getStaticallyKnownDataLength();
+ if (staticDataLen == ITypeTrait.VARIABLE_LENGTH) {
+ cumul += encDec.decode();
+ decodedFieldSlots[field++] = cumul;
+ } else {
+ cumul += staticDataLen;
+ decodedFieldSlots[field++] = cumul;
+ }
+ }
+ dataStartOff = encDec.getPos();
+ }
+
+ @Override
+ public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
+ resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
+ }
+
+ @Override
+ public void setFieldCount(int fieldCount) {
+ this.fieldCount = fieldCount;
+ if (decodedFieldSlots == null) {
+ decodedFieldSlots = new int[fieldCount];
+ } else {
+ if (fieldCount > decodedFieldSlots.length) {
+ decodedFieldSlots = new int[fieldCount];
+ }
+ }
+ nullFlagsBytes = getNullFlagsBytes();
+ this.fieldStartIndex = 0;
+ }
+
+ @Override
+ public void setFieldCount(int fieldStartIndex, int fieldCount) {
+ setFieldCount(fieldCount);
+ this.fieldStartIndex = fieldStartIndex;
+ }
+
+ @Override
+ public int getFieldCount() {
+ return fieldCount;
+ }
+
+ @Override
+ public byte[] getFieldData(int fIdx) {
+ return buf.array();
+ }
+
+ @Override
+ public int getFieldLength(int fIdx) {
+ if (fIdx == 0) {
+ return decodedFieldSlots[0];
+ } else {
+ return decodedFieldSlots[fIdx] - decodedFieldSlots[fIdx - 1];
+ }
+ }
+
+ @Override
+ public int getFieldStart(int fIdx) {
+ if (fIdx == 0) {
+ return dataStartOff;
+ } else {
+ return dataStartOff + decodedFieldSlots[fIdx - 1];
+ }
+ }
+
+ protected int getNullFlagsBytes() {
+ return (int) Math.ceil(fieldCount / 8.0);
+ }
@Override
- public void resetByTupleOffset(ByteBuffer buf, int tupleStartOff) {
- this.buf = buf;
- this.tupleStartOff = tupleStartOff;
-
- // decode field slots
- int field = 0;
- int cumul = 0;
- int end = fieldStartIndex + fieldCount;
- encDec.reset(buf.array(), tupleStartOff + nullFlagsBytes);
- for (int i = fieldStartIndex; i < end; i++) {
- int staticDataLen = typeTraits[i].getStaticallyKnownDataLength();
- if (staticDataLen == ITypeTrait.VARIABLE_LENGTH) {
- cumul += encDec.decode();
- decodedFieldSlots[field++] = cumul;
- } else {
- cumul += staticDataLen;
- decodedFieldSlots[field++] = cumul;
- }
- }
- dataStartOff = encDec.getPos();
- }
-
- @Override
- public void resetByTupleIndex(ITreeIndexFrame frame, int tupleIndex) {
- resetByTupleOffset(frame.getBuffer(), frame.getTupleOffset(tupleIndex));
- }
-
- @Override
- public void setFieldCount(int fieldCount) {
- this.fieldCount = fieldCount;
- if (decodedFieldSlots == null) {
- decodedFieldSlots = new int[fieldCount];
- } else {
- if (fieldCount > decodedFieldSlots.length) {
- decodedFieldSlots = new int[fieldCount];
- }
- }
- nullFlagsBytes = getNullFlagsBytes();
- this.fieldStartIndex = 0;
- }
-
- @Override
- public void setFieldCount(int fieldStartIndex, int fieldCount) {
- setFieldCount(fieldCount);
- this.fieldStartIndex = fieldStartIndex;
- }
-
- @Override
- public int getFieldCount() {
- return fieldCount;
- }
-
- @Override
- public byte[] getFieldData(int fIdx) {
- return buf.array();
- }
-
- @Override
- public int getFieldLength(int fIdx) {
- if (fIdx == 0) {
- return decodedFieldSlots[0];
- } else {
- return decodedFieldSlots[fIdx] - decodedFieldSlots[fIdx - 1];
- }
- }
-
- @Override
- public int getFieldStart(int fIdx) {
- if (fIdx == 0) {
- return dataStartOff;
- } else {
- return dataStartOff + decodedFieldSlots[fIdx - 1];
- }
- }
-
- protected int getNullFlagsBytes() {
- return (int) Math.ceil(fieldCount / 8.0);
+ public int getTupleSize() {
+ return dataStartOff - tupleStartOff + decodedFieldSlots[fieldCount - 1];
}
}
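In the type-aware format above, only variable-length fields contribute an encoded length to the tuple header, so getTupleSize() is the header length (dataStartOff - tupleStartOff) plus the cumulative end offset of the last field. A worked example under stated assumptions: one fixed 4-byte field and one 5-byte variable-length field whose length is assumed to encode in a single byte.

```java
// Worked example of TypeAwareTupleReference.getTupleSize(); all numbers illustrative.
public class TypeAwareTupleSizeExample {
    public static void main(String[] args) {
        int fieldCount = 2;
        int nullFlagsBytes = (int) Math.ceil(fieldCount / 8.0); // 1
        int varLenSlotBytes = 1; // only the variable-length field gets an encoded length

        // decodedFieldSlots holds cumulative end offsets of the data region: 4, 9.
        int lastFieldEnd = 4 + 5;

        int headerBytes = nullFlagsBytes + varLenSlotBytes; // dataStartOff - tupleStartOff
        int tupleSize = headerBytes + lastFieldEnd;
        System.out.println(tupleSize); // 2 + 9 = 11
    }
}
```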
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
index 95468d4..56c4d10 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
@@ -24,131 +24,130 @@
public class TypeAwareTupleWriter implements ITreeIndexTupleWriter {
- protected ITypeTrait[] typeTraits;
- protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
+ protected ITypeTrait[] typeTraits;
+ protected VarLenIntEncoderDecoder encDec = new VarLenIntEncoderDecoder();
- public TypeAwareTupleWriter(ITypeTrait[] typeTraits) {
- this.typeTraits = typeTraits;
- }
+ public TypeAwareTupleWriter(ITypeTrait[] typeTraits) {
+ this.typeTraits = typeTraits;
+ }
- @Override
- public int bytesRequired(ITupleReference tuple) {
- int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
- for (int i = 0; i < tuple.getFieldCount(); i++) {
- bytes += tuple.getFieldLength(i);
- }
- return bytes;
- }
+ @Override
+ public int bytesRequired(ITupleReference tuple) {
+ int bytes = getNullFlagsBytes(tuple) + getFieldSlotsBytes(tuple);
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ bytes += tuple.getFieldLength(i);
+ }
+ return bytes;
+ }
- @Override
- public int bytesRequired(ITupleReference tuple, int startField,
- int numFields) {
- int bytes = getNullFlagsBytes(numFields)
- + getFieldSlotsBytes(tuple, startField, numFields);
- for (int i = startField; i < startField + numFields; i++) {
- bytes += tuple.getFieldLength(i);
- }
- return bytes;
- }
+ @Override
+ public int bytesRequired(ITupleReference tuple, int startField, int numFields) {
+ int bytes = getNullFlagsBytes(numFields) + getFieldSlotsBytes(tuple, startField, numFields);
+ for (int i = startField; i < startField + numFields; i++) {
+ bytes += tuple.getFieldLength(i);
+ }
+ return bytes;
+ }
- @Override
- public ITreeIndexTupleReference createTupleReference() {
- return new TypeAwareTupleReference(typeTraits);
- }
+ @Override
+ public ITreeIndexTupleReference createTupleReference() {
+ return new TypeAwareTupleReference(typeTraits);
+ }
- @Override
- public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf,
- int targetOff) {
- int runner = targetOff;
- int nullFlagsBytes = getNullFlagsBytes(tuple);
- // write null indicator bits
- for (int i = 0; i < nullFlagsBytes; i++) {
- targetBuf.put(runner++, (byte) 0);
- }
+ @Override
+ public int writeTuple(ITupleReference tuple, ByteBuffer targetBuf, int targetOff) {
+ return writeTuple(tuple, targetBuf.array(), targetOff);
+ }
- // write field slots for variable length fields
- encDec.reset(targetBuf.array(), runner);
- for (int i = 0; i < tuple.getFieldCount(); i++) {
- if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
- encDec.encode(tuple.getFieldLength(i));
- }
- }
- runner = encDec.getPos();
+ @Override
+ public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
+ int runner = targetOff;
+ int nullFlagsBytes = getNullFlagsBytes(tuple);
+ // write null indicator bits
+ for (int i = 0; i < nullFlagsBytes; i++) {
+ targetBuf[runner++] = (byte) 0;
+ }
- // write data fields
- for (int i = 0; i < tuple.getFieldCount(); i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
- targetBuf.array(), runner, tuple.getFieldLength(i));
- runner += tuple.getFieldLength(i);
- }
+ // write field slots for variable length fields
+ encDec.reset(targetBuf, runner);
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+ encDec.encode(tuple.getFieldLength(i));
+ }
+ }
+ runner = encDec.getPos();
- return runner - targetOff;
- }
+ // write data fields
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf, runner,
+ tuple.getFieldLength(i));
+ runner += tuple.getFieldLength(i);
+ }
- @Override
- public int writeTupleFields(ITupleReference tuple, int startField,
- int numFields, ByteBuffer targetBuf, int targetOff) {
- int runner = targetOff;
- int nullFlagsBytes = getNullFlagsBytes(numFields);
- // write null indicator bits
- for (int i = 0; i < nullFlagsBytes; i++) {
- targetBuf.put(runner++, (byte) 0);
- }
+ return runner - targetOff;
+ }
+
+ @Override
+ public int writeTupleFields(ITupleReference tuple, int startField, int numFields, ByteBuffer targetBuf,
+ int targetOff) {
+ int runner = targetOff;
+ int nullFlagsBytes = getNullFlagsBytes(numFields);
+ // write null indicator bits
+ for (int i = 0; i < nullFlagsBytes; i++) {
+ targetBuf.put(runner++, (byte) 0);
+ }
- // write field slots for variable length fields
- encDec.reset(targetBuf.array(), runner);
- for (int i = startField; i < startField + numFields; i++) {
- if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
- encDec.encode(tuple.getFieldLength(i));
- }
- }
- runner = encDec.getPos();
+ // write field slots for variable length fields
+ encDec.reset(targetBuf.array(), runner);
+ for (int i = startField; i < startField + numFields; i++) {
+ if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+ encDec.encode(tuple.getFieldLength(i));
+ }
+ }
+ runner = encDec.getPos();
- for (int i = startField; i < startField + numFields; i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
- targetBuf.array(), runner, tuple.getFieldLength(i));
- runner += tuple.getFieldLength(i);
- }
+ for (int i = startField; i < startField + numFields; i++) {
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+ tuple.getFieldLength(i));
+ runner += tuple.getFieldLength(i);
+ }
- return runner - targetOff;
- }
+ return runner - targetOff;
+ }
- protected int getNullFlagsBytes(ITupleReference tuple) {
- return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
- }
+ protected int getNullFlagsBytes(ITupleReference tuple) {
+ return (int) Math.ceil((double) tuple.getFieldCount() / 8.0);
+ }
- protected int getFieldSlotsBytes(ITupleReference tuple) {
- int fieldSlotBytes = 0;
- for (int i = 0; i < tuple.getFieldCount(); i++) {
- if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
- fieldSlotBytes += encDec.getBytesRequired(tuple
- .getFieldLength(i));
- }
- }
- return fieldSlotBytes;
- }
+ protected int getFieldSlotsBytes(ITupleReference tuple) {
+ int fieldSlotBytes = 0;
+ for (int i = 0; i < tuple.getFieldCount(); i++) {
+ if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+ fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
+ }
+ }
+ return fieldSlotBytes;
+ }
- protected int getNullFlagsBytes(int numFields) {
- return (int) Math.ceil((double) numFields / 8.0);
- }
+ protected int getNullFlagsBytes(int numFields) {
+ return (int) Math.ceil((double) numFields / 8.0);
+ }
- protected int getFieldSlotsBytes(ITupleReference tuple, int startField,
- int numFields) {
- int fieldSlotBytes = 0;
- for (int i = startField; i < startField + numFields; i++) {
- if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
- fieldSlotBytes += encDec.getBytesRequired(tuple
- .getFieldLength(i));
- }
- }
- return fieldSlotBytes;
- }
+ protected int getFieldSlotsBytes(ITupleReference tuple, int startField, int numFields) {
+ int fieldSlotBytes = 0;
+ for (int i = startField; i < startField + numFields; i++) {
+ if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+ fieldSlotBytes += encDec.getBytesRequired(tuple.getFieldLength(i));
+ }
+ }
+ return fieldSlotBytes;
+ }
- public ITypeTrait[] getTypeTraits() {
- return typeTraits;
- }
+ public ITypeTrait[] getTypeTraits() {
+ return typeTraits;
+ }
- public void setTypeTraits(ITypeTrait[] typeTraits) {
- this.typeTraits = typeTraits;
- }
+ public void setTypeTraits(ITypeTrait[] typeTraits) {
+ this.typeTraits = typeTraits;
+ }
}
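
For reference, writeTuple above now writes into a raw byte[] (writeTupleFields still
takes a ByteBuffer), but the on-page layout is unchanged: null-indicator bytes first,
then one length slot per variable-length field, then the concatenated field data.
A minimal standalone sketch of that size calculation follows; the one-byte length
slots and the VARIABLE_LENGTH marker are simplifying assumptions for illustration,
not the actual Hyracks encoder or ITypeTrait constants.

// Illustration only: mirrors the layout written by the type-aware tuple writer above.
public class TupleLayoutSketch {
    // -1 stands in for a variable-length field, analogous to ITypeTrait.VARIABLE_LENGTH
    static final int VARIABLE_LENGTH = -1;

    static int bytesRequired(int[] declaredLengths, int[] actualLengths) {
        int fieldCount = actualLengths.length;
        // one null-indicator bit per field, rounded up to whole bytes
        int nullFlagsBytes = (fieldCount + 7) / 8;
        int slotBytes = 0;
        int dataBytes = 0;
        for (int i = 0; i < fieldCount; i++) {
            if (declaredLengths[i] == VARIABLE_LENGTH) {
                // assume a single-byte length slot; the real writer uses a
                // variable-byte encoder (encDec.getBytesRequired)
                slotBytes += 1;
            }
            dataBytes += actualLengths[i];
        }
        return nullFlagsBytes + slotBytes + dataBytes;
    }

    public static void main(String[] args) {
        int[] declared = { 4, VARIABLE_LENGTH, 8 }; // int, var-length field, double
        int[] actual = { 4, 11, 8 };
        System.out.println(bytesRequired(declared, actual)); // 1 + 1 + 23 = 25
    }
}
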
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
index 38d2a46..18c0d38 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
@@ -40,685 +40,603 @@
import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
-public class RTreeNSMInteriorFrame extends RTreeNSMFrame implements
- IRTreeInteriorFrame {
+public class RTreeNSMInteriorFrame extends RTreeNSMFrame implements IRTreeInteriorFrame {
- private static final int childPtrSize = 4;
+ private static final int childPtrSize = 4;
- public RTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter,
- int keyFieldCount) {
- super(tupleWriter, keyFieldCount);
- }
+ public RTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
+ super(tupleWriter, keyFieldCount);
+ }
- @Override
- public String printKeys(MultiComparator cmp,
- ISerializerDeserializer[] fields) throws HyracksDataException {
- StringBuilder strBuilder = new StringBuilder();
- int tupleCount = buf.getInt(tupleCountOff);
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- for (int i = 0; i < tupleCount; i++) {
- frameTuple.resetByTupleIndex(this, i);
- for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(
- frameTuple.getFieldData(j),
- frameTuple.getFieldStart(j),
- frameTuple.getFieldLength(j));
- DataInput dataIn = new DataInputStream(inStream);
- Object o = fields[j].deserialize(dataIn);
- strBuilder.append(o.toString() + " ");
- }
- strBuilder.append(" | ");
- }
- strBuilder.append("\n");
- return strBuilder.toString();
- }
+ @Override
+ public String printKeys(MultiComparator cmp, ISerializerDeserializer[] fields) throws HyracksDataException {
+ StringBuilder strBuilder = new StringBuilder();
+ int tupleCount = buf.getInt(tupleCountOff);
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ for (int i = 0; i < tupleCount; i++) {
+ frameTuple.resetByTupleIndex(this, i);
+ for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j), frameTuple.getFieldLength(j));
+ DataInput dataIn = new DataInputStream(inStream);
+ Object o = fields[j].deserialize(dataIn);
+ strBuilder.append(o.toString() + " ");
+ }
+ strBuilder.append(" | ");
+ }
+ strBuilder.append("\n");
+ return strBuilder.toString();
+ }
- @Override
- public boolean findBestChild(ITupleReference tuple, MultiComparator cmp) {
- cmpFrameTuple.setFieldCount(cmp.getKeyFieldCount());
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ @Override
+ public boolean findBestChild(ITupleReference tuple, MultiComparator cmp) {
+ cmpFrameTuple.setFieldCount(cmp.getKeyFieldCount());
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
- int bestChild = 0;
- double minEnlargedArea = Double.MAX_VALUE;
+ int bestChild = 0;
+ double minEnlargedArea = Double.MAX_VALUE;
- // the children pointers in the node point to leaves
- if (getLevel() == 1) {
- // find least overlap enlargement, use minimum enlarged area to
- // break tie, if tie still exists use minimum area to break it
- for (int i = 0; i < getTupleCount(); ++i) {
- frameTuple.resetByTupleIndex(this, i);
- double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
- tupleEntries1.add(i, enlargedArea);
- if (enlargedArea < minEnlargedArea) {
- minEnlargedArea = enlargedArea;
- bestChild = i;
- }
- }
- if (minEnlargedArea < RTreeNSMFrame.doubleEpsilon()
- || minEnlargedArea > RTreeNSMFrame.doubleEpsilon()) {
- minEnlargedArea = Double.MAX_VALUE;
- int k;
- if (getTupleCount() > nearMinimumOverlapFactor) {
- // sort the entries based on their area enlargement needed
- // to include the object
- tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount());
- k = nearMinimumOverlapFactor;
- } else {
- k = getTupleCount();
- }
+ // the children pointers in the node point to leaves
+ if (getLevel() == 1) {
+ // find least overlap enlargement, use minimum enlarged area to
+ // break tie, if tie still exists use minimum area to break it
+ for (int i = 0; i < getTupleCount(); ++i) {
+ frameTuple.resetByTupleIndex(this, i);
+ double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
+ tupleEntries1.add(i, enlargedArea);
+ if (enlargedArea < minEnlargedArea) {
+ minEnlargedArea = enlargedArea;
+ bestChild = i;
+ }
+ }
+ if (minEnlargedArea < RTreeNSMFrame.doubleEpsilon() || minEnlargedArea > RTreeNSMFrame.doubleEpsilon()) {
+ minEnlargedArea = Double.MAX_VALUE;
+ int k;
+ if (getTupleCount() > nearMinimumOverlapFactor) {
+ // sort the entries based on their area enlargement needed
+ // to include the object
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount());
+ k = nearMinimumOverlapFactor;
+ } else {
+ k = getTupleCount();
+ }
- double minOverlap = Double.MAX_VALUE;
- int id = 0;
- for (int i = 0; i < k; ++i) {
- double difference = 0.0;
- for (int j = 0; j < getTupleCount(); ++j) {
- frameTuple.resetByTupleIndex(this, j);
- cmpFrameTuple.resetByTupleIndex(this, tupleEntries1
- .get(i).getTupleIndex());
+ double minOverlap = Double.MAX_VALUE;
+ int id = 0;
+ for (int i = 0; i < k; ++i) {
+ double difference = 0.0;
+ for (int j = 0; j < getTupleCount(); ++j) {
+ frameTuple.resetByTupleIndex(this, j);
+ cmpFrameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
- int c = pointerCmp(frameTuple, cmpFrameTuple, cmp);
- if (c != 0) {
- double intersection = overlappedArea(frameTuple,
- tuple, cmpFrameTuple, cmp);
- if (intersection != 0.0) {
- difference += intersection
- - overlappedArea(frameTuple, null,
- cmpFrameTuple, cmp);
- }
- } else {
- id = j;
- }
- }
+ int c = pointerCmp(frameTuple, cmpFrameTuple, cmp);
+ if (c != 0) {
+ double intersection = overlappedArea(frameTuple, tuple, cmpFrameTuple, cmp);
+ if (intersection != 0.0) {
+ difference += intersection - overlappedArea(frameTuple, null, cmpFrameTuple, cmp);
+ }
+ } else {
+ id = j;
+ }
+ }
- double enlargedArea = enlargedArea(cmpFrameTuple, tuple,
- cmp);
- if (difference < minOverlap) {
- minOverlap = difference;
- minEnlargedArea = enlargedArea;
- bestChild = id;
- } else if (difference == minOverlap) {
- if (enlargedArea < minEnlargedArea) {
- minEnlargedArea = enlargedArea;
- bestChild = id;
- } else if (enlargedArea == minEnlargedArea) {
- double area = area(cmpFrameTuple, cmp);
- frameTuple.resetByTupleIndex(this, bestChild);
- double minArea = area(frameTuple, cmp);
- if (area < minArea) {
- bestChild = id;
- }
- }
- }
- }
- }
- } else { // find minimum enlarged area, use minimum area to break tie
- for (int i = 0; i < getTupleCount(); i++) {
- frameTuple.resetByTupleIndex(this, i);
- double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
- if (enlargedArea < minEnlargedArea) {
- minEnlargedArea = enlargedArea;
- bestChild = i;
- } else if (enlargedArea == minEnlargedArea) {
- double area = area(frameTuple, cmp);
- frameTuple.resetByTupleIndex(this, bestChild);
- double minArea = area(frameTuple, cmp);
- if (area < minArea) {
- bestChild = i;
- }
- }
- }
- }
- tupleEntries1.clear();
+ double enlargedArea = enlargedArea(cmpFrameTuple, tuple, cmp);
+ if (difference < minOverlap) {
+ minOverlap = difference;
+ minEnlargedArea = enlargedArea;
+ bestChild = id;
+ } else if (difference == minOverlap) {
+ if (enlargedArea < minEnlargedArea) {
+ minEnlargedArea = enlargedArea;
+ bestChild = id;
+ } else if (enlargedArea == minEnlargedArea) {
+ double area = area(cmpFrameTuple, cmp);
+ frameTuple.resetByTupleIndex(this, bestChild);
+ double minArea = area(frameTuple, cmp);
+ if (area < minArea) {
+ bestChild = id;
+ }
+ }
+ }
+ }
+ }
+ } else { // find minimum enlarged area, use minimum area to break tie
+ for (int i = 0; i < getTupleCount(); i++) {
+ frameTuple.resetByTupleIndex(this, i);
+ double enlargedArea = enlargedArea(frameTuple, tuple, cmp);
+ if (enlargedArea < minEnlargedArea) {
+ minEnlargedArea = enlargedArea;
+ bestChild = i;
+ } else if (enlargedArea == minEnlargedArea) {
+ double area = area(frameTuple, cmp);
+ frameTuple.resetByTupleIndex(this, bestChild);
+ double minArea = area(frameTuple, cmp);
+ if (area < minArea) {
+ bestChild = i;
+ }
+ }
+ }
+ }
+ tupleEntries1.clear();
- frameTuple.resetByTupleIndex(this, bestChild);
- if (minEnlargedArea > 0.0) {
- return true;
- } else {
- return false;
- }
- }
+ frameTuple.resetByTupleIndex(this, bestChild);
+ if (minEnlargedArea > 0.0) {
+ return true;
+ } else {
+ return false;
+ }
+ }
- @Override
- public int getBestChildPageId(MultiComparator cmp) {
- return buf.getInt(getChildPointerOff(frameTuple, cmp));
- }
+ @Override
+ public int getBestChildPageId(MultiComparator cmp) {
+ return buf.getInt(getChildPointerOff(frameTuple, cmp));
+ }
- @Override
- public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp) {
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- for (int i = 0; i < getTupleCount(); i++) {
- frameTuple.resetByTupleIndex(this, i);
- int c = pointerCmp(frameTuple, tuple, cmp);
- if (c == 0) {
- return i;
- }
- }
- return -1;
- }
+ @Override
+ public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ for (int i = 0; i < getTupleCount(); i++) {
+ frameTuple.resetByTupleIndex(this, i);
+ int c = pointerCmp(frameTuple, tuple, cmp);
+ if (c == 0) {
+ return i;
+ }
+ }
+ return -1;
+ }
- @Override
- public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex,
- MultiComparator cmp) {
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- frameTuple.resetByTupleIndex(this, tupleIndex);
- int maxFieldPos = cmp.getKeyFieldCount() / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- int c = cmp.getComparators()[i].compare(tuple.getFieldData(i),
- tuple.getFieldStart(i), tuple.getFieldLength(i),
- frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
- frameTuple.getFieldLength(j));
- if (c > 0) {
- return -1;
- }
- c = cmp.getComparators()[i].compare(tuple.getFieldData(j),
- tuple.getFieldStart(j), tuple.getFieldLength(j),
- frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
- frameTuple.getFieldLength(i));
- if (c < 0) {
- return -1;
- }
- }
- return buf.getInt(getChildPointerOff(frameTuple, cmp));
- }
+ @Override
+ public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ frameTuple.resetByTupleIndex(this, tupleIndex);
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+ frameTuple.getFieldLength(j));
+ if (c > 0) {
+ return -1;
+ }
+ c = cmp.getComparators()[i].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+ frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ if (c < 0) {
+ return -1;
+ }
+ }
+ return buf.getInt(getChildPointerOff(frameTuple, cmp));
+ }
- @Override
- public int findTupleByPointer(ITupleReference tuple, PathList traverseList,
- int parentIndex, MultiComparator cmp) {
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- for (int i = 0; i < getTupleCount(); i++) {
- frameTuple.resetByTupleIndex(this, i);
+ @Override
+ public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentIndex, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ for (int i = 0; i < getTupleCount(); i++) {
+ frameTuple.resetByTupleIndex(this, i);
- int c = pointerCmp(frameTuple, tuple, cmp);
- if (c == 0) {
- return i;
- } else {
- int pageId = IntegerSerializerDeserializer.getInt(
- frameTuple.getFieldData(cmp.getKeyFieldCount() - 1),
- getChildPointerOff(frameTuple, cmp));
- traverseList.add(pageId, -1, parentIndex);
- }
- }
- return -1;
- }
+ int c = pointerCmp(frameTuple, tuple, cmp);
+ if (c == 0) {
+ return i;
+ } else {
+ int pageId = IntegerSerializerDeserializer.getInt(frameTuple.getFieldData(cmp.getKeyFieldCount() - 1),
+ getChildPointerOff(frameTuple, cmp));
+ traverseList.add(pageId, -1, parentIndex);
+ }
+ }
+ return -1;
+ }
- @Override
- public boolean compact(MultiComparator cmp) {
- resetSpaceParams();
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ @Override
+ public boolean compact(MultiComparator cmp) {
+ resetSpaceParams();
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
- int tupleCount = buf.getInt(tupleCountOff);
- int freeSpace = buf.getInt(freeSpaceOff);
+ int tupleCount = buf.getInt(tupleCountOff);
+ int freeSpace = buf.getInt(freeSpaceOff);
- ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
- sortedTupleOffs.ensureCapacity(tupleCount);
- for (int i = 0; i < tupleCount; i++) {
- int slotOff = slotManager.getSlotOff(i);
- int tupleOff = slotManager.getTupleOff(slotOff);
- sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
- }
- Collections.sort(sortedTupleOffs);
+ ArrayList<SlotOffTupleOff> sortedTupleOffs = new ArrayList<SlotOffTupleOff>();
+ sortedTupleOffs.ensureCapacity(tupleCount);
+ for (int i = 0; i < tupleCount; i++) {
+ int slotOff = slotManager.getSlotOff(i);
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ sortedTupleOffs.add(new SlotOffTupleOff(i, slotOff, tupleOff));
+ }
+ Collections.sort(sortedTupleOffs);
- for (int i = 0; i < sortedTupleOffs.size(); i++) {
- int tupleOff = sortedTupleOffs.get(i).tupleOff;
- frameTuple.resetByTupleOffset(buf, tupleOff);
+ for (int i = 0; i < sortedTupleOffs.size(); i++) {
+ int tupleOff = sortedTupleOffs.get(i).tupleOff;
+ frameTuple.resetByTupleOffset(buf, tupleOff);
- int tupleEndOff = frameTuple.getFieldStart(frameTuple
- .getFieldCount() - 1)
- + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
- int tupleLength = tupleEndOff - tupleOff + childPtrSize;
- System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace,
- tupleLength);
+ int tupleEndOff = frameTuple.getFieldStart(frameTuple.getFieldCount() - 1)
+ + frameTuple.getFieldLength(frameTuple.getFieldCount() - 1);
+ int tupleLength = tupleEndOff - tupleOff + childPtrSize;
+ System.arraycopy(buf.array(), tupleOff, buf.array(), freeSpace, tupleLength);
- slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
- freeSpace += tupleLength;
- }
+ slotManager.setSlot(sortedTupleOffs.get(i).slotOff, freeSpace);
+ freeSpace += tupleLength;
+ }
- buf.putInt(freeSpaceOff, freeSpace);
- buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount
- * slotManager.getSlotSize());
+ buf.putInt(freeSpaceOff, freeSpace);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
- return false;
- }
+ return false;
+ }
- @Override
- public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple,
- MultiComparator cmp) {
- int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize; // for
- // the
- // child
- // pointer
- if (bytesRequired + slotManager.getSlotSize() <= buf.capacity()
- - buf.getInt(freeSpaceOff)
- - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
- return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
- else if (bytesRequired + slotManager.getSlotSize() <= buf
- .getInt(totalFreeSpaceOff))
- return FrameOpSpaceStatus.SUFFICIENT_SPACE;
- else
- return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
- }
+ @Override
+ public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple, MultiComparator cmp) {
+ int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize; // for the child pointer
+ if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
+ - (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
+ return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
+ else if (bytesRequired + slotManager.getSlotSize() <= buf.getInt(totalFreeSpaceOff))
+ return FrameOpSpaceStatus.SUFFICIENT_SPACE;
+ else
+ return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
+ }
- @Override
- public void adjustKey(ITupleReference tuple, int tupleIndex,
- MultiComparator cmp) {
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- if (tupleIndex == -1) {
- tupleIndex = findTupleByPointer(tuple, cmp);
- }
- if (tupleIndex != -1) {
- tupleWriter.writeTuple(tuple, buf, getTupleOffset(tupleIndex));
- }
- }
+ @Override
+ public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ if (tupleIndex == -1) {
+ tupleIndex = findTupleByPointer(tuple, cmp);
+ }
+ if (tupleIndex != -1) {
+ tupleWriter.writeTuple(tuple, buf.array(), getTupleOffset(tupleIndex));
+ }
+ }
- private int pointerCmp(ITupleReference tupleA, ITupleReference tupleB,
- MultiComparator cmp) {
- return cmp.getIntCmp().compare(
- tupleA.getFieldData(cmp.getKeyFieldCount() - 1),
- getChildPointerOff(tupleA, cmp), childPtrSize,
- tupleB.getFieldData(cmp.getKeyFieldCount() - 1),
- getChildPointerOff(tupleB, cmp), childPtrSize);
- }
+ private int pointerCmp(ITupleReference tupleA, ITupleReference tupleB, MultiComparator cmp) {
+ return cmp.getIntCmp().compare(tupleA.getFieldData(cmp.getKeyFieldCount() - 1),
+ getChildPointerOff(tupleA, cmp), childPtrSize, tupleB.getFieldData(cmp.getKeyFieldCount() - 1),
+ getChildPointerOff(tupleB, cmp), childPtrSize);
+ }
- @Override
- public int split(ITreeIndexFrame rightFrame, ITupleReference tuple,
- MultiComparator cmp, ISplitKey splitKey) throws Exception {
+ @Override
+ public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
+ throws Exception {
- rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
- RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
- RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
- RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame
- .getTupleWriter());
+ RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
+ RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
+ RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
- // calculations are based on the R*-tree paper
- int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
- int splitDistribution = getTupleCount() - (2 * m) + 2;
+ // calculations are based on the R*-tree paper
+ int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
+ int splitDistribution = getTupleCount() - (2 * m) + 2;
- // to calculate the minimum margin in order to pick the split axis
- double minMargin = Double.MAX_VALUE;
- int splitAxis = 0, sortOrder = 0;
+ // to calculate the minimum margin in order to pick the split axis
+ double minMargin = Double.MAX_VALUE;
+ int splitAxis = 0, sortOrder = 0;
- int maxFieldPos = cmp.getKeyFieldCount() / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- for (int k = 0; k < getTupleCount(); ++k) {
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ for (int k = 0; k < getTupleCount(); ++k) {
- frameTuple.resetByTupleIndex(this, k);
- double LowerKey = cmp.getValueProviders()[i]
- .getValue(frameTuple.getFieldData(i),
- frameTuple.getFieldStart(i));
- double UpperKey = cmp.getValueProviders()[j]
- .getValue(frameTuple.getFieldData(j),
- frameTuple.getFieldStart(j));
+ frameTuple.resetByTupleIndex(this, k);
+ double LowerKey = cmp.getValueProviders()[i].getValue(frameTuple.getFieldData(i),
+ frameTuple.getFieldStart(i));
+ double UpperKey = cmp.getValueProviders()[j].getValue(frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j));
- tupleEntries1.add(k, LowerKey);
- tupleEntries2.add(k, UpperKey);
- }
- double LowerKey = cmp.getValueProviders()[i].getValue(
- tuple.getFieldData(i), tuple.getFieldStart(i));
- double UpperKey = cmp.getValueProviders()[j].getValue(
- tuple.getFieldData(j), tuple.getFieldStart(j));
+ tupleEntries1.add(k, LowerKey);
+ tupleEntries2.add(k, UpperKey);
+ }
+ double LowerKey = cmp.getValueProviders()[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
+ double UpperKey = cmp.getValueProviders()[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
- tupleEntries1.add(-1, LowerKey);
- tupleEntries2.add(-1, UpperKey);
+ tupleEntries1.add(-1, LowerKey);
+ tupleEntries2.add(-1, UpperKey);
- tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
- tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+ tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
- double lowerMargin = 0.0, upperMargin = 0.0;
- // generate distribution
- for (int k = 1; k <= splitDistribution; ++k) {
- int d = m - 1 + k;
+ double lowerMargin = 0.0, upperMargin = 0.0;
+ // generate distribution
+ for (int k = 1; k <= splitDistribution; ++k) {
+ int d = m - 1 + k;
- generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
- generateDist(tuple, tupleEntries2, rec[1], 0, d, cmp);
- generateDist(tuple, tupleEntries1, rec[2], d,
- getTupleCount() + 1, cmp);
- generateDist(tuple, tupleEntries2, rec[3], d,
- getTupleCount() + 1, cmp);
+ generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
+ generateDist(tuple, tupleEntries2, rec[1], 0, d, cmp);
+ generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1, cmp);
+ generateDist(tuple, tupleEntries2, rec[3], d, getTupleCount() + 1, cmp);
- // calculate the margin of the distributions
- lowerMargin += rec[0].margin() + rec[2].margin();
- upperMargin += rec[1].margin() + rec[3].margin();
- }
- double margin = Math.min(lowerMargin, upperMargin);
+ // calculate the margin of the distributions
+ lowerMargin += rec[0].margin() + rec[2].margin();
+ upperMargin += rec[1].margin() + rec[3].margin();
+ }
+ double margin = Math.min(lowerMargin, upperMargin);
- // store minimum margin as split axis
- if (margin < minMargin) {
- minMargin = margin;
- splitAxis = i;
- sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
- }
+ // store minimum margin as split axis
+ if (margin < minMargin) {
+ minMargin = margin;
+ splitAxis = i;
+ sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
+ }
- tupleEntries1.clear();
- tupleEntries2.clear();
- }
+ tupleEntries1.clear();
+ tupleEntries2.clear();
+ }
- for (int i = 0; i < getTupleCount(); ++i) {
- frameTuple.resetByTupleIndex(this, i);
- double key = cmp.getValueProviders()[splitAxis + sortOrder]
- .getValue(frameTuple.getFieldData(splitAxis + sortOrder),
- frameTuple.getFieldStart(splitAxis + sortOrder));
- tupleEntries1.add(i, key);
- }
- double key = cmp.getValueProviders()[splitAxis + sortOrder].getValue(
- tuple.getFieldData(splitAxis + sortOrder),
- tuple.getFieldStart(splitAxis + sortOrder));
- tupleEntries1.add(-1, key);
- tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+ for (int i = 0; i < getTupleCount(); ++i) {
+ frameTuple.resetByTupleIndex(this, i);
+ double key = cmp.getValueProviders()[splitAxis + sortOrder].getValue(
+ frameTuple.getFieldData(splitAxis + sortOrder), frameTuple.getFieldStart(splitAxis + sortOrder));
+ tupleEntries1.add(i, key);
+ }
+ double key = cmp.getValueProviders()[splitAxis + sortOrder].getValue(tuple.getFieldData(splitAxis + sortOrder),
+ tuple.getFieldStart(splitAxis + sortOrder));
+ tupleEntries1.add(-1, key);
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
- double minArea = Double.MAX_VALUE;
- double minOverlap = Double.MAX_VALUE;
- int splitPoint = 0;
- for (int i = 1; i <= splitDistribution; ++i) {
- int d = m - 1 + i;
+ double minArea = Double.MAX_VALUE;
+ double minOverlap = Double.MAX_VALUE;
+ int splitPoint = 0;
+ for (int i = 1; i <= splitDistribution; ++i) {
+ int d = m - 1 + i;
- generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
- generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1,
- cmp);
+ generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
+ generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1, cmp);
- double overlap = rec[0].overlappedArea(rec[2]);
- if (overlap < minOverlap) {
- splitPoint = d;
- minOverlap = overlap;
- minArea = rec[0].area() + rec[2].area();
- } else if (overlap == minOverlap) {
- double area = rec[0].area() + rec[2].area();
- if (area < minArea) {
- splitPoint = d;
- minArea = area;
- }
- }
- }
- int startIndex, endIndex;
- if (splitPoint < (getTupleCount() + 1) / 2) {
- startIndex = 0;
- endIndex = splitPoint;
- } else {
- startIndex = splitPoint;
- endIndex = (getTupleCount() + 1);
- }
- boolean tupleInserted = false;
- int totalBytes = 0, numOfDeletedTuples = 0;
- for (int i = startIndex; i < endIndex; i++) {
- if (tupleEntries1.get(i).getTupleIndex() != -1) {
- frameTuple.resetByTupleIndex(this, tupleEntries1.get(i)
- .getTupleIndex());
- rightFrame.insert(frameTuple, cmp, -1);
- ((UnorderedSlotManager) slotManager).modifySlot(slotManager
- .getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
- totalBytes += tupleWriter.bytesRequired(frameTuple)
- + childPtrSize;
- numOfDeletedTuples++;
- } else {
- rightFrame.insert(tuple, cmp, -1);
- tupleInserted = true;
- }
- }
+ double overlap = rec[0].overlappedArea(rec[2]);
+ if (overlap < minOverlap) {
+ splitPoint = d;
+ minOverlap = overlap;
+ minArea = rec[0].area() + rec[2].area();
+ } else if (overlap == minOverlap) {
+ double area = rec[0].area() + rec[2].area();
+ if (area < minArea) {
+ splitPoint = d;
+ minArea = area;
+ }
+ }
+ }
+ int startIndex, endIndex;
+ if (splitPoint < (getTupleCount() + 1) / 2) {
+ startIndex = 0;
+ endIndex = splitPoint;
+ } else {
+ startIndex = splitPoint;
+ endIndex = (getTupleCount() + 1);
+ }
+ boolean tupleInserted = false;
+ int totalBytes = 0, numOfDeletedTuples = 0;
+ for (int i = startIndex; i < endIndex; i++) {
+ if (tupleEntries1.get(i).getTupleIndex() != -1) {
+ frameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
+ rightFrame.insert(frameTuple, cmp, -1);
+ ((UnorderedSlotManager) slotManager).modifySlot(
+ slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
+ totalBytes += tupleWriter.bytesRequired(frameTuple) + childPtrSize;
+ numOfDeletedTuples++;
+ } else {
+ rightFrame.insert(tuple, cmp, -1);
+ tupleInserted = true;
+ }
+ }
- ((UnorderedSlotManager) slotManager).deleteEmptySlots();
+ ((UnorderedSlotManager) slotManager).deleteEmptySlots();
- // maintain space information
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
- + totalBytes + (slotManager.getSlotSize() * numOfDeletedTuples));
+ // maintain space information
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
+ + (slotManager.getSlotSize() * numOfDeletedTuples));
- // compact both pages
- rightFrame.compact(cmp);
- compact(cmp);
+ // compact both pages
+ rightFrame.compact(cmp);
+ compact(cmp);
- if (!tupleInserted) {
- insert(tuple, cmp, -1);
- }
+ if (!tupleInserted) {
+ insert(tuple, cmp, -1);
+ }
- int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByTupleOffset(buf, tupleOff);
- int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0,
- cmp.getKeyFieldCount());
+ int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+ int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
- splitKey.initData(splitKeySize);
- this.adjustMBR(tuples, cmp);
- rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0,
- rTreeSplitKey.getLeftPageBuffer(), 0);
- rTreeSplitKey.getLeftTuple().resetByTupleOffset(
- rTreeSplitKey.getLeftPageBuffer(), 0);
+ splitKey.initData(splitKeySize);
+ this.adjustMBR(tuples, cmp);
+ rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
+ rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
- ((IRTreeFrame) rightFrame).adjustMBR(
- ((RTreeNSMFrame) rightFrame).getTuples(), cmp);
- rTreeTupleWriterRightFrame.writeTupleFields(
- ((RTreeNSMFrame) rightFrame).getTuples(), 0,
- rTreeSplitKey.getRightPageBuffer(), 0);
- rTreeSplitKey.getRightTuple().resetByTupleOffset(
- rTreeSplitKey.getRightPageBuffer(), 0);
+ ((IRTreeFrame) rightFrame).adjustMBR(((RTreeNSMFrame) rightFrame).getTuples(), cmp);
+ rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
+ rTreeSplitKey.getRightPageBuffer(), 0);
+ rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
- tupleEntries1.clear();
- tupleEntries2.clear();
- return 0;
- }
+ tupleEntries1.clear();
+ tupleEntries2.clear();
+ return 0;
+ }
- private int getChildPointerOff(ITupleReference tuple, MultiComparator cmp) {
- return tuple.getFieldStart(cmp.getKeyFieldCount() - 1)
- + tuple.getFieldLength(cmp.getKeyFieldCount() - 1);
- }
+ private int getChildPointerOff(ITupleReference tuple, MultiComparator cmp) {
+ return tuple.getFieldStart(cmp.getKeyFieldCount() - 1) + tuple.getFieldLength(cmp.getKeyFieldCount() - 1);
+ }
- @Override
- public void insert(ITupleReference tuple, MultiComparator cmp,
- int tupleIndex) throws Exception {
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
- int freeSpace = buf.getInt(freeSpaceOff);
- int bytesWritten = tupleWriter.writeTupleFields(tuple, 0,
- cmp.getKeyFieldCount(), buf, freeSpace);
- System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1),
- getChildPointerOff(tuple, cmp), buf.array(), freeSpace
- + bytesWritten, childPtrSize);
- int tupleSize = bytesWritten + childPtrSize;
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
+ int freeSpace = buf.getInt(freeSpaceOff);
+ int bytesWritten = tupleWriter.writeTupleFields(tuple, 0, cmp.getKeyFieldCount(), buf, freeSpace);
+ System.arraycopy(tuple.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tuple, cmp), buf.array(),
+ freeSpace + bytesWritten, childPtrSize);
+ int tupleSize = bytesWritten + childPtrSize;
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize
- - slotManager.getSlotSize());
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + tupleSize);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - tupleSize - slotManager.getSlotSize());
- }
+ }
- @Override
- public void delete(int tupleIndex, MultiComparator cmp) throws Exception {
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- int slotOff = slotManager.getSlotOff(tupleIndex);
+ @Override
+ public void delete(int tupleIndex, MultiComparator cmp) throws Exception {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ int slotOff = slotManager.getSlotOff(tupleIndex);
- int tupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByTupleOffset(buf, tupleOff);
- int tupleSize = tupleWriter.bytesRequired(frameTuple);
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+ int tupleSize = tupleWriter.bytesRequired(frameTuple);
- // perform deletion (we just do a memcpy to overwrite the slot)
- int slotStartOff = slotManager.getSlotEndOff();
- int length = slotOff - slotStartOff;
- System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff
- + slotManager.getSlotSize(), length);
+ // perform deletion (we just do a memcpy to overwrite the slot)
+ int slotStartOff = slotManager.getSlotEndOff();
+ int length = slotOff - slotStartOff;
+ System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
- // maintain space information
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize
- + childPtrSize + slotManager.getSlotSize());
- }
+ // maintain space information
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+ buf.putInt(totalFreeSpaceOff,
+ buf.getInt(totalFreeSpaceOff) + tupleSize + childPtrSize + slotManager.getSlotSize());
+ }
- @Override
- public boolean recomputeMBR(ITupleReference tuple, int tupleIndex,
- MultiComparator cmp) {
- frameTuple.setFieldCount(cmp.getKeyFieldCount());
- frameTuple.resetByTupleIndex(this, tupleIndex);
+ @Override
+ public boolean recomputeMBR(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getKeyFieldCount());
+ frameTuple.resetByTupleIndex(this, tupleIndex);
- int maxFieldPos = cmp.getKeyFieldCount() / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i),
- frameTuple.getFieldStart(i), frameTuple.getFieldLength(i),
- tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
- if (c != 0) {
- return true;
- }
- c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j),
- frameTuple.getFieldStart(j), frameTuple.getFieldLength(j),
- tuple.getFieldData(j), tuple.getFieldStart(j),
- tuple.getFieldLength(j));
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
+ frameTuple.getFieldLength(i), tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i));
+ if (c != 0) {
+ return true;
+ }
+ c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+ frameTuple.getFieldLength(j), tuple.getFieldData(j), tuple.getFieldStart(j),
+ tuple.getFieldLength(j));
- if (c != 0) {
- return true;
- }
- }
- return false;
- }
+ if (c != 0) {
+ return true;
+ }
+ }
+ return false;
+ }
- private double overlappedArea(ITupleReference tuple1,
- ITupleReference tupleToBeInserted, ITupleReference tuple2,
- MultiComparator cmp) {
- double area = 1.0;
- double f1, f2;
+ private double overlappedArea(ITupleReference tuple1, ITupleReference tupleToBeInserted, ITupleReference tuple2,
+ MultiComparator cmp) {
+ double area = 1.0;
+ double f1, f2;
- int maxFieldPos = cmp.getKeyFieldCount() / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- double pHigh1, pLow1;
- if (tupleToBeInserted != null) {
- int c = cmp.getComparators()[i].compare(tuple1.getFieldData(i),
- tuple1.getFieldStart(i), tuple1.getFieldLength(i),
- tupleToBeInserted.getFieldData(i),
- tupleToBeInserted.getFieldStart(i),
- tupleToBeInserted.getFieldLength(i));
- if (c < 0) {
- pLow1 = cmp.getValueProviders()[i].getValue(
- tuple1.getFieldData(i), tuple1.getFieldStart(i));
- } else {
- pLow1 = cmp.getValueProviders()[i].getValue(
- tupleToBeInserted.getFieldData(i),
- tupleToBeInserted.getFieldStart(i));
- }
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ double pHigh1, pLow1;
+ if (tupleToBeInserted != null) {
+ int c = cmp.getComparators()[i].compare(tuple1.getFieldData(i), tuple1.getFieldStart(i),
+ tuple1.getFieldLength(i), tupleToBeInserted.getFieldData(i),
+ tupleToBeInserted.getFieldStart(i), tupleToBeInserted.getFieldLength(i));
+ if (c < 0) {
+ pLow1 = cmp.getValueProviders()[i].getValue(tuple1.getFieldData(i), tuple1.getFieldStart(i));
+ } else {
+ pLow1 = cmp.getValueProviders()[i].getValue(tupleToBeInserted.getFieldData(i),
+ tupleToBeInserted.getFieldStart(i));
+ }
- c = cmp.getComparators()[j].compare(tuple1.getFieldData(j),
- tuple1.getFieldStart(j), tuple1.getFieldLength(j),
- tupleToBeInserted.getFieldData(j),
- tupleToBeInserted.getFieldStart(j),
- tupleToBeInserted.getFieldLength(j));
- if (c > 0) {
- pHigh1 = cmp.getValueProviders()[j].getValue(
- tuple1.getFieldData(j), tuple1.getFieldStart(j));
- } else {
- pHigh1 = cmp.getValueProviders()[j].getValue(
- tupleToBeInserted.getFieldData(j),
- tupleToBeInserted.getFieldStart(j));
- }
- } else {
- pLow1 = cmp.getValueProviders()[i].getValue(
- tuple1.getFieldData(i), tuple1.getFieldStart(i));
- pHigh1 = cmp.getValueProviders()[j].getValue(
- tuple1.getFieldData(j), tuple1.getFieldStart(j));
- }
+ c = cmp.getComparators()[j].compare(tuple1.getFieldData(j), tuple1.getFieldStart(j),
+ tuple1.getFieldLength(j), tupleToBeInserted.getFieldData(j),
+ tupleToBeInserted.getFieldStart(j), tupleToBeInserted.getFieldLength(j));
+ if (c > 0) {
+ pHigh1 = cmp.getValueProviders()[j].getValue(tuple1.getFieldData(j), tuple1.getFieldStart(j));
+ } else {
+ pHigh1 = cmp.getValueProviders()[j].getValue(tupleToBeInserted.getFieldData(j),
+ tupleToBeInserted.getFieldStart(j));
+ }
+ } else {
+ pLow1 = cmp.getValueProviders()[i].getValue(tuple1.getFieldData(i), tuple1.getFieldStart(i));
+ pHigh1 = cmp.getValueProviders()[j].getValue(tuple1.getFieldData(j), tuple1.getFieldStart(j));
+ }
- double pLow2 = cmp.getValueProviders()[i].getValue(
- tuple2.getFieldData(i), tuple2.getFieldStart(i));
- double pHigh2 = cmp.getValueProviders()[j].getValue(
- tuple2.getFieldData(j), tuple2.getFieldStart(j));
+ double pLow2 = cmp.getValueProviders()[i].getValue(tuple2.getFieldData(i), tuple2.getFieldStart(i));
+ double pHigh2 = cmp.getValueProviders()[j].getValue(tuple2.getFieldData(j), tuple2.getFieldStart(j));
- if (pLow1 > pHigh2 || pHigh1 < pLow2) {
- return 0.0;
- }
+ if (pLow1 > pHigh2 || pHigh1 < pLow2) {
+ return 0.0;
+ }
- f1 = Math.max(pLow1, pLow2);
- f2 = Math.min(pHigh1, pHigh2);
- area *= f2 - f1;
- }
- return area;
- }
+ f1 = Math.max(pLow1, pLow2);
+ f2 = Math.min(pHigh1, pHigh2);
+ area *= f2 - f1;
+ }
+ return area;
+ }
- private double enlargedArea(ITupleReference tuple,
- ITupleReference tupleToBeInserted, MultiComparator cmp) {
- double areaBeforeEnlarge = area(tuple, cmp);
- double areaAfterEnlarge = 1.0;
+ private double enlargedArea(ITupleReference tuple, ITupleReference tupleToBeInserted, MultiComparator cmp) {
+ double areaBeforeEnlarge = area(tuple, cmp);
+ double areaAfterEnlarge = 1.0;
- int maxFieldPos = cmp.getKeyFieldCount() / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- double pHigh, pLow;
- int c = cmp.getComparators()[i].compare(tuple.getFieldData(i),
- tuple.getFieldStart(i), tuple.getFieldLength(i),
- tupleToBeInserted.getFieldData(i),
- tupleToBeInserted.getFieldStart(i),
- tupleToBeInserted.getFieldLength(i));
- if (c < 0) {
- pLow = cmp.getValueProviders()[i].getValue(
- tuple.getFieldData(i), tuple.getFieldStart(i));
- } else {
- pLow = cmp.getValueProviders()[i].getValue(
- tupleToBeInserted.getFieldData(i),
- tupleToBeInserted.getFieldStart(i));
- }
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ double pHigh, pLow;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), tupleToBeInserted.getFieldData(i), tupleToBeInserted.getFieldStart(i),
+ tupleToBeInserted.getFieldLength(i));
+ if (c < 0) {
+ pLow = cmp.getValueProviders()[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
+ } else {
+ pLow = cmp.getValueProviders()[i].getValue(tupleToBeInserted.getFieldData(i),
+ tupleToBeInserted.getFieldStart(i));
+ }
- c = cmp.getComparators()[j].compare(tuple.getFieldData(j),
- tuple.getFieldStart(j), tuple.getFieldLength(j),
- tupleToBeInserted.getFieldData(j),
- tupleToBeInserted.getFieldStart(j),
- tupleToBeInserted.getFieldLength(j));
- if (c > 0) {
- pHigh = cmp.getValueProviders()[j].getValue(
- tuple.getFieldData(j), tuple.getFieldStart(j));
- } else {
- pHigh = cmp.getValueProviders()[j].getValue(
- tupleToBeInserted.getFieldData(j),
- tupleToBeInserted.getFieldStart(j));
- }
- areaAfterEnlarge *= pHigh - pLow;
- }
- return areaAfterEnlarge - areaBeforeEnlarge;
- }
+ c = cmp.getComparators()[j].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+ tupleToBeInserted.getFieldData(j), tupleToBeInserted.getFieldStart(j),
+ tupleToBeInserted.getFieldLength(j));
+ if (c > 0) {
+ pHigh = cmp.getValueProviders()[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
+ } else {
+ pHigh = cmp.getValueProviders()[j].getValue(tupleToBeInserted.getFieldData(j),
+ tupleToBeInserted.getFieldStart(j));
+ }
+ areaAfterEnlarge *= pHigh - pLow;
+ }
+ return areaAfterEnlarge - areaBeforeEnlarge;
+ }
- private double area(ITupleReference tuple, MultiComparator cmp) {
- double area = 1.0;
- int maxFieldPos = cmp.getKeyFieldCount() / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- area *= cmp.getValueProviders()[j].getValue(tuple.getFieldData(j),
- tuple.getFieldStart(j))
- - cmp.getValueProviders()[i].getValue(
- tuple.getFieldData(i), tuple.getFieldStart(i));
- }
- return area;
- }
+ private double area(ITupleReference tuple, MultiComparator cmp) {
+ double area = 1.0;
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ area *= cmp.getValueProviders()[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j))
+ - cmp.getValueProviders()[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
+ }
+ return area;
+ }
- @Override
- public void enlarge(ITupleReference tuple, MultiComparator cmp) {
- int maxFieldPos = cmp.getKeyFieldCount() / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i),
- frameTuple.getFieldStart(i), frameTuple.getFieldLength(i),
- tuple.getFieldData(i), tuple.getFieldStart(i),
- tuple.getFieldLength(i));
- if (c > 0) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i),
- frameTuple.getFieldData(i),
- frameTuple.getFieldStart(i), tuple.getFieldLength(i));
- }
- c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j),
- frameTuple.getFieldStart(j), frameTuple.getFieldLength(j),
- tuple.getFieldData(j), tuple.getFieldStart(j),
- tuple.getFieldLength(j));
- if (c < 0) {
- System.arraycopy(tuple.getFieldData(j), tuple.getFieldStart(j),
- frameTuple.getFieldData(j),
- frameTuple.getFieldStart(j), tuple.getFieldLength(j));
- }
- }
- }
+ @Override
+ public void enlarge(ITupleReference tuple, MultiComparator cmp) {
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
+ frameTuple.getFieldLength(i), tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i));
+ if (c > 0) {
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), frameTuple.getFieldData(i),
+ frameTuple.getFieldStart(i), tuple.getFieldLength(i));
+ }
+ c = cmp.getComparators()[j].compare(frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+ frameTuple.getFieldLength(j), tuple.getFieldData(j), tuple.getFieldStart(j),
+ tuple.getFieldLength(j));
+ if (c < 0) {
+ System.arraycopy(tuple.getFieldData(j), tuple.getFieldStart(j), frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j), tuple.getFieldLength(j));
+ }
+ }
+ }
- @Override
- public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp) {
- for (int i = 0; i < tuples.length; i++) {
- tuples[i].setFieldCount(cmp.getKeyFieldCount());
- tuples[i].resetByTupleIndex(this, 0);
- }
+ @Override
+ public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp) {
+ for (int i = 0; i < tuples.length; i++) {
+ tuples[i].setFieldCount(cmp.getKeyFieldCount());
+ tuples[i].resetByTupleIndex(this, 0);
+ }
- adjustMBRImpl(tuples, cmp);
- }
-}
\ No newline at end of file
+ adjustMBRImpl(tuples, cmp);
+ }
+}
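
The findBestChild logic above follows the R*-tree choose-subtree rule: at the level
just above the leaves it minimizes overlap enlargement, elsewhere it minimizes area
enlargement, breaking ties by smaller area. The sketch below shows the enlargement
computation for the simplified two-dimensional case; representing an MBR as lo/hi
double arrays is an assumption for illustration, since the real code reads
coordinates through the comparator's value providers.

// Illustration only: 2D enlarged-area computation, analogous to enlargedArea above.
public class EnlargedAreaSketch {
    static double area(double[] lo, double[] hi) {
        double a = 1.0;
        for (int i = 0; i < lo.length; i++) {
            a *= hi[i] - lo[i];
        }
        return a;
    }

    // Area added to the MBR (lo, hi) if it must also cover the rectangle (tLo, tHi).
    static double enlargedArea(double[] lo, double[] hi, double[] tLo, double[] tHi) {
        double before = area(lo, hi);
        double after = 1.0;
        for (int i = 0; i < lo.length; i++) {
            after *= Math.max(hi[i], tHi[i]) - Math.min(lo[i], tLo[i]);
        }
        return after - before;
    }

    public static void main(String[] args) {
        double[] lo = { 0, 0 }, hi = { 2, 2 };
        double[] tLo = { 1, 1 }, tHi = { 3, 3 };
        // the MBR grows from area 4.0 to 9.0, so the enlargement is 5.0
        System.out.println(enlargedArea(lo, hi, tLo, tHi));
    }
}
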
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
index 100b4d8..227d351 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
@@ -21,23 +21,25 @@
public class RTreeNSMInteriorFrameFactory implements ITreeIndexFrameFactory {
- private static final long serialVersionUID = 1L;
- private ITreeIndexTupleWriterFactory tupleWriterFactory;
- private int keyFieldCount;
+ private static final long serialVersionUID = 1L;
+ private final ITreeIndexTupleWriterFactory tupleWriterFactory;
+ private final int keyFieldCount;
- public RTreeNSMInteriorFrameFactory(
- ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
- this.tupleWriterFactory = tupleWriterFactory;
- if (keyFieldCount % 2 != 0) {
- throw new IllegalArgumentException(
- "The key has different number of dimensions.");
- }
- this.keyFieldCount = keyFieldCount;
- }
+ public RTreeNSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
+ this.tupleWriterFactory = tupleWriterFactory;
+ if (keyFieldCount % 2 != 0) {
+ throw new IllegalArgumentException("The key has different number of dimensions.");
+ }
+ this.keyFieldCount = keyFieldCount;
+ }
+
+ @Override
+ public IRTreeInteriorFrame createFrame() {
+ return new RTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter(), keyFieldCount);
+ }
@Override
- public IRTreeInteriorFrame createFrame() {
- return new RTreeNSMInteriorFrame(
- tupleWriterFactory.createTupleWriter(), keyFieldCount);
+ public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+ return tupleWriterFactory;
}
-}
\ No newline at end of file
+}
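
Both split() implementations (interior frame above, leaf frame below) derive their
candidate distributions from the same R*-tree parameters: m = floor((tupleCount + 1)
* splitFactor) and splitDistribution = tupleCount - 2m + 2, with distribution k
placing the first m - 1 + k sorted entries in the left group. A quick numeric check,
assuming the paper's splitFactor of 0.4 (the actual constant is defined in
RTreeNSMFrame, outside this patch):

// Illustration only: the split-distribution arithmetic used by split() above and below.
public class SplitDistributionSketch {
    public static void main(String[] args) {
        double splitFactor = 0.4; // assumed value; see RTreeNSMFrame for the real constant
        int tupleCount = 20;
        int m = (int) Math.floor((tupleCount + 1) * splitFactor); // 8
        int splitDistribution = tupleCount - (2 * m) + 2;         // 6
        // distribution k (1..6) puts the first m - 1 + k entries in the left group
        System.out.println("m=" + m + ", distributions=" + splitDistribution);
    }
}
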
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
index fa2ad32..ed366ca 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
@@ -30,263 +30,236 @@
public class RTreeNSMLeafFrame extends RTreeNSMFrame implements IRTreeLeafFrame {
- public RTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter,
- int keyFieldCount) {
- super(tupleWriter, keyFieldCount);
- }
+ public RTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
+ super(tupleWriter, keyFieldCount);
+ }
- @Override
- public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) {
- frameTuple.setFieldCount(cmp.getFieldCount());
- return slotManager.findTupleIndex(tuple, frameTuple, cmp, null, null);
- }
+ @Override
+ public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ return slotManager.findTupleIndex(tuple, frameTuple, cmp, null, null);
+ }
- @Override
- public boolean intersect(ITupleReference tuple, int tupleIndex,
- MultiComparator cmp) {
- frameTuple.setFieldCount(cmp.getFieldCount());
- frameTuple.resetByTupleIndex(this, tupleIndex);
- int maxFieldPos = cmp.getKeyFieldCount() / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- int c = cmp.getComparators()[i].compare(tuple.getFieldData(i),
- tuple.getFieldStart(i), tuple.getFieldLength(i),
- frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
- frameTuple.getFieldLength(j));
- if (c > 0) {
- return false;
- }
- c = cmp.getComparators()[i].compare(tuple.getFieldData(j),
- tuple.getFieldStart(j), tuple.getFieldLength(j),
- frameTuple.getFieldData(i), frameTuple.getFieldStart(i),
- frameTuple.getFieldLength(i));
+ @Override
+ public boolean intersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ frameTuple.resetByTupleIndex(this, tupleIndex);
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), frameTuple.getFieldData(j), frameTuple.getFieldStart(j),
+ frameTuple.getFieldLength(j));
+ if (c > 0) {
+ return false;
+ }
+ c = cmp.getComparators()[i].compare(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j),
+ frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
- if (c < 0) {
- return false;
- }
- }
- return true;
- }
+ if (c < 0) {
+ return false;
+ }
+ }
+ return true;
+ }
- @Override
- public int split(ITreeIndexFrame rightFrame, ITupleReference tuple,
- MultiComparator cmp, ISplitKey splitKey) throws Exception {
+ @Override
+ public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
+ throws Exception {
- rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
- frameTuple.setFieldCount(cmp.getFieldCount());
+ rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
+ frameTuple.setFieldCount(cmp.getFieldCount());
- RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
- RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
- RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame
- .getTupleWriter());
+ RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
+ RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
+ RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
- // calculations are based on the R*-tree paper
- int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
- int splitDistribution = getTupleCount() - (2 * m) + 2;
+ // calculations are based on the R*-tree paper
+ int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
+ int splitDistribution = getTupleCount() - (2 * m) + 2;
- // to calculate the minimum margin in order to pick the split axis
- double minMargin = Double.MAX_VALUE;
- int splitAxis = 0, sortOrder = 0;
+ // to calculate the minimum margin in order to pick the split axis
+ double minMargin = Double.MAX_VALUE;
+ int splitAxis = 0, sortOrder = 0;
- int maxFieldPos = cmp.getKeyFieldCount() / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- for (int k = 0; k < getTupleCount(); ++k) {
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ for (int k = 0; k < getTupleCount(); ++k) {
- frameTuple.resetByTupleIndex(this, k);
+ frameTuple.resetByTupleIndex(this, k);
- double LowerKey = cmp.getValueProviders()[i]
- .getValue(frameTuple.getFieldData(i),
- frameTuple.getFieldStart(i));
- double UpperKey = cmp.getValueProviders()[j]
- .getValue(frameTuple.getFieldData(j),
- frameTuple.getFieldStart(j));
+ double LowerKey = cmp.getValueProviders()[i].getValue(frameTuple.getFieldData(i),
+ frameTuple.getFieldStart(i));
+ double UpperKey = cmp.getValueProviders()[j].getValue(frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j));
- tupleEntries1.add(k, LowerKey);
- tupleEntries2.add(k, UpperKey);
- }
- double LowerKey = cmp.getValueProviders()[i].getValue(
- tuple.getFieldData(i), tuple.getFieldStart(i));
- double UpperKey = cmp.getValueProviders()[j].getValue(
- tuple.getFieldData(j), tuple.getFieldStart(j));
+ tupleEntries1.add(k, LowerKey);
+ tupleEntries2.add(k, UpperKey);
+ }
+ double LowerKey = cmp.getValueProviders()[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
+ double UpperKey = cmp.getValueProviders()[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
- tupleEntries1.add(-1, LowerKey);
- tupleEntries2.add(-1, UpperKey);
+ tupleEntries1.add(-1, LowerKey);
+ tupleEntries2.add(-1, UpperKey);
- tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
- tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+ tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
- double lowerMargin = 0.0, upperMargin = 0.0;
- // generate distribution
- for (int k = 1; k <= splitDistribution; ++k) {
- int d = m - 1 + k;
+ double lowerMargin = 0.0, upperMargin = 0.0;
+ // generate distribution
+ for (int k = 1; k <= splitDistribution; ++k) {
+ int d = m - 1 + k;
- generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
- generateDist(tuple, tupleEntries2, rec[1], 0, d, cmp);
- generateDist(tuple, tupleEntries1, rec[2], d,
- getTupleCount() + 1, cmp);
- generateDist(tuple, tupleEntries2, rec[3], d,
- getTupleCount() + 1, cmp);
+ generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
+ generateDist(tuple, tupleEntries2, rec[1], 0, d, cmp);
+ generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1, cmp);
+ generateDist(tuple, tupleEntries2, rec[3], d, getTupleCount() + 1, cmp);
- // calculate the margin of the distributions
- lowerMargin += rec[0].margin() + rec[2].margin();
- upperMargin += rec[1].margin() + rec[3].margin();
- }
- double margin = Math.min(lowerMargin, upperMargin);
+ // calculate the margin of the distributions
+ lowerMargin += rec[0].margin() + rec[2].margin();
+ upperMargin += rec[1].margin() + rec[3].margin();
+ }
+ double margin = Math.min(lowerMargin, upperMargin);
- // store minimum margin as split axis
- if (margin < minMargin) {
- minMargin = margin;
- splitAxis = i;
- sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
- }
+ // store minimum margin as split axis
+ if (margin < minMargin) {
+ minMargin = margin;
+ splitAxis = i;
+ sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
+ }
- tupleEntries1.clear();
- tupleEntries2.clear();
- }
+ tupleEntries1.clear();
+ tupleEntries2.clear();
+ }
- for (int i = 0; i < getTupleCount(); ++i) {
- frameTuple.resetByTupleIndex(this, i);
- double key = cmp.getValueProviders()[splitAxis + sortOrder]
- .getValue(frameTuple.getFieldData(splitAxis + sortOrder),
- frameTuple.getFieldStart(splitAxis + sortOrder));
- tupleEntries1.add(i, key);
- }
- double key = cmp.getValueProviders()[splitAxis + sortOrder].getValue(
- tuple.getFieldData(splitAxis + sortOrder),
- tuple.getFieldStart(splitAxis + sortOrder));
- tupleEntries1.add(-1, key);
- tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+ for (int i = 0; i < getTupleCount(); ++i) {
+ frameTuple.resetByTupleIndex(this, i);
+ double key = cmp.getValueProviders()[splitAxis + sortOrder].getValue(
+ frameTuple.getFieldData(splitAxis + sortOrder), frameTuple.getFieldStart(splitAxis + sortOrder));
+ tupleEntries1.add(i, key);
+ }
+ double key = cmp.getValueProviders()[splitAxis + sortOrder].getValue(tuple.getFieldData(splitAxis + sortOrder),
+ tuple.getFieldStart(splitAxis + sortOrder));
+ tupleEntries1.add(-1, key);
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
- double minArea = Double.MAX_VALUE;
- double minOverlap = Double.MAX_VALUE;
- int splitPoint = 0;
- for (int i = 1; i <= splitDistribution; ++i) {
- int d = m - 1 + i;
+ double minArea = Double.MAX_VALUE;
+ double minOverlap = Double.MAX_VALUE;
+ int splitPoint = 0;
+ for (int i = 1; i <= splitDistribution; ++i) {
+ int d = m - 1 + i;
- generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
- generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1,
- cmp);
+ generateDist(tuple, tupleEntries1, rec[0], 0, d, cmp);
+ generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1, cmp);
- double overlap = rec[0].overlappedArea(rec[2]);
- if (overlap < minOverlap) {
- splitPoint = d;
- minOverlap = overlap;
- minArea = rec[0].area() + rec[2].area();
- } else if (overlap == minOverlap) {
- double area = rec[0].area() + rec[2].area();
- if (area < minArea) {
- splitPoint = d;
- minArea = area;
- }
- }
- }
- int startIndex, endIndex;
- if (splitPoint < (getTupleCount() + 1) / 2) {
- startIndex = 0;
- endIndex = splitPoint;
- } else {
- startIndex = splitPoint;
- endIndex = (getTupleCount() + 1);
- }
- boolean tupleInserted = false;
- int totalBytes = 0, numOfDeletedTuples = 0;
- for (int i = startIndex; i < endIndex; i++) {
- if (tupleEntries1.get(i).getTupleIndex() != -1) {
- frameTuple.resetByTupleIndex(this, tupleEntries1.get(i)
- .getTupleIndex());
- rightFrame.insert(frameTuple, cmp, -1);
- ((UnorderedSlotManager) slotManager).modifySlot(slotManager
- .getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
- totalBytes += tupleWriter.bytesRequired(frameTuple);
- numOfDeletedTuples++;
- } else {
- rightFrame.insert(tuple, cmp, -1);
- tupleInserted = true;
- }
- }
+ double overlap = rec[0].overlappedArea(rec[2]);
+ if (overlap < minOverlap) {
+ splitPoint = d;
+ minOverlap = overlap;
+ minArea = rec[0].area() + rec[2].area();
+ } else if (overlap == minOverlap) {
+ double area = rec[0].area() + rec[2].area();
+ if (area < minArea) {
+ splitPoint = d;
+ minArea = area;
+ }
+ }
+ }
+ int startIndex, endIndex;
+ if (splitPoint < (getTupleCount() + 1) / 2) {
+ startIndex = 0;
+ endIndex = splitPoint;
+ } else {
+ startIndex = splitPoint;
+ endIndex = (getTupleCount() + 1);
+ }
+ boolean tupleInserted = false;
+ int totalBytes = 0, numOfDeletedTuples = 0;
+ for (int i = startIndex; i < endIndex; i++) {
+ if (tupleEntries1.get(i).getTupleIndex() != -1) {
+ frameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
+ rightFrame.insert(frameTuple, cmp, -1);
+ ((UnorderedSlotManager) slotManager).modifySlot(
+ slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
+ totalBytes += tupleWriter.bytesRequired(frameTuple);
+ numOfDeletedTuples++;
+ } else {
+ rightFrame.insert(tuple, cmp, -1);
+ tupleInserted = true;
+ }
+ }
- ((UnorderedSlotManager) slotManager).deleteEmptySlots();
+ ((UnorderedSlotManager) slotManager).deleteEmptySlots();
- // maintain space information
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
- + totalBytes + (slotManager.getSlotSize() * numOfDeletedTuples));
+ // maintain space information
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
+ + (slotManager.getSlotSize() * numOfDeletedTuples));
- // compact both pages
- rightFrame.compact(cmp);
- compact(cmp);
+ // compact both pages
+ rightFrame.compact(cmp);
+ compact(cmp);
- if (!tupleInserted) {
- insert(tuple, cmp, -1);
- }
+ if (!tupleInserted) {
+ insert(tuple, cmp, -1);
+ }
- int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByTupleOffset(buf, tupleOff);
- int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0,
- cmp.getKeyFieldCount());
+ int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+ int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, cmp.getKeyFieldCount());
- splitKey.initData(splitKeySize);
- this.adjustMBR(tuples, cmp);
- rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0,
- rTreeSplitKey.getLeftPageBuffer(), 0);
- rTreeSplitKey.getLeftTuple().resetByTupleOffset(
- rTreeSplitKey.getLeftPageBuffer(), 0);
+ splitKey.initData(splitKeySize);
+ this.adjustMBR(tuples, cmp);
+ rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
+ rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
- ((IRTreeFrame) rightFrame).adjustMBR(
- ((RTreeNSMFrame) rightFrame).getTuples(), cmp);
- rTreeTupleWriterRightFrame.writeTupleFields(
- ((RTreeNSMFrame) rightFrame).getTuples(), 0,
- rTreeSplitKey.getRightPageBuffer(), 0);
- rTreeSplitKey.getRightTuple().resetByTupleOffset(
- rTreeSplitKey.getRightPageBuffer(), 0);
+ ((IRTreeFrame) rightFrame).adjustMBR(((RTreeNSMFrame) rightFrame).getTuples(), cmp);
+ rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
+ rTreeSplitKey.getRightPageBuffer(), 0);
+ rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
- tupleEntries1.clear();
- tupleEntries2.clear();
- return 0;
- }
+ tupleEntries1.clear();
+ tupleEntries2.clear();
+ return 0;
+ }
- @Override
- public void insert(ITupleReference tuple, MultiComparator cmp,
- int tupleIndex) throws Exception {
- frameTuple.setFieldCount(cmp.getFieldCount());
- slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
- int bytesWritten = tupleWriter.writeTuple(tuple, buf,
- buf.getInt(freeSpaceOff));
+ @Override
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ slotManager.insertSlot(-1, buf.getInt(freeSpaceOff));
+ int bytesWritten = tupleWriter.writeTuple(tuple, buf.array(), buf.getInt(freeSpaceOff));
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
- buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff)
- - bytesWritten - slotManager.getSlotSize());
- }
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
+ buf.putInt(freeSpaceOff, buf.getInt(freeSpaceOff) + bytesWritten);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) - bytesWritten - slotManager.getSlotSize());
+ }
- @Override
- public void delete(int tupleIndex, MultiComparator cmp) throws Exception {
- frameTuple.setFieldCount(cmp.getFieldCount());
- int slotOff = slotManager.getSlotOff(tupleIndex);
+ @Override
+ public void delete(int tupleIndex, MultiComparator cmp) throws Exception {
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ int slotOff = slotManager.getSlotOff(tupleIndex);
- int tupleOff = slotManager.getTupleOff(slotOff);
- frameTuple.resetByTupleOffset(buf, tupleOff);
- int tupleSize = tupleWriter.bytesRequired(frameTuple);
+ int tupleOff = slotManager.getTupleOff(slotOff);
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+ int tupleSize = tupleWriter.bytesRequired(frameTuple);
- // perform deletion (we just do a memcpy to overwrite the slot)
- int slotStartOff = slotManager.getSlotEndOff();
- int length = slotOff - slotStartOff;
- System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff
- + slotManager.getSlotSize(), length);
+ // perform deletion (we just do a memcpy to overwrite the slot)
+ int slotStartOff = slotManager.getSlotEndOff();
+ int length = slotOff - slotStartOff;
+ System.arraycopy(buf.array(), slotStartOff, buf.array(), slotStartOff + slotManager.getSlotSize(), length);
- // maintain space information
- buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize
- + slotManager.getSlotSize());
- }
+ // maintain space information
+ buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) - 1);
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + tupleSize + slotManager.getSlotSize());
+ }
- @Override
- public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp) {
- for (int i = 0; i < tuples.length; i++) {
- tuples[i].setFieldCount(cmp.getFieldCount());
- tuples[i].resetByTupleIndex(this, 0);
- }
+ @Override
+ public void adjustMBR(ITreeIndexTupleReference[] tuples, MultiComparator cmp) {
+ for (int i = 0; i < tuples.length; i++) {
+ tuples[i].setFieldCount(cmp.getFieldCount());
+ tuples[i].resetByTupleIndex(this, 0);
+ }
- adjustMBRImpl(tuples, cmp);
- }
-}
\ No newline at end of file
+ adjustMBRImpl(tuples, cmp);
+ }
+}
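
Reading aid, not part of the patch: the reformatted split() above implements the R*-tree split heuristic — choose the split axis whose candidate distributions have the smallest summed margins, then choose the split point on that axis with the least overlap between the two resulting MBRs, breaking ties by total area. The standalone sketch below condenses that idea; the Rect type, chooseSplit, and the simplification of sorting only by the lower coordinate (the frame code above also considers the upper-coordinate ordering via tupleEntries2) are illustrative assumptions, not the Hyracks frame API.

import java.util.Arrays;
import java.util.Comparator;

public class RStarSplitSketch {

    /** Axis-aligned rectangle with per-dimension low/high coordinates. */
    static final class Rect {
        final double[] lo, hi;

        Rect(double[] lo, double[] hi) { this.lo = lo.clone(); this.hi = hi.clone(); }

        /** Minimum bounding rectangle of rects[from, to). */
        static Rect enclose(Rect[] rects, int from, int to) {
            double[] lo = rects[from].lo.clone(), hi = rects[from].hi.clone();
            for (int i = from + 1; i < to; i++) {
                for (int d = 0; d < lo.length; d++) {
                    lo[d] = Math.min(lo[d], rects[i].lo[d]);
                    hi[d] = Math.max(hi[d], rects[i].hi[d]);
                }
            }
            return new Rect(lo, hi);
        }

        /** Sum of side lengths: the "margin" minimized when choosing the split axis. */
        double margin() {
            double m = 0.0;
            for (int d = 0; d < lo.length; d++) m += hi[d] - lo[d];
            return m;
        }

        double area() {
            double a = 1.0;
            for (int d = 0; d < lo.length; d++) a *= hi[d] - lo[d];
            return a;
        }

        /** Area of the intersection with another rectangle (0 if disjoint). */
        double overlap(Rect o) {
            double a = 1.0;
            for (int d = 0; d < lo.length; d++) {
                double side = Math.min(hi[d], o.hi[d]) - Math.max(lo[d], o.lo[d]);
                if (side <= 0.0) return 0.0;
                a *= side;
            }
            return a;
        }
    }

    /**
     * Returns the split index k (the first k sorted entries stay on the left page);
     * m is the minimum number of entries per page after the split.
     */
    static int chooseSplit(Rect[] entries, int m) {
        // 1. Split axis: the axis whose candidate distributions have the smallest
        // summed margin (sorting by the lower coordinate only, for brevity).
        int bestAxis = 0;
        double minMarginSum = Double.MAX_VALUE;
        for (int axis = 0; axis < entries[0].lo.length; axis++) {
            final int a = axis;
            Rect[] sorted = entries.clone();
            Arrays.sort(sorted, Comparator.comparingDouble((Rect r) -> r.lo[a]));
            double marginSum = 0.0;
            for (int k = m; k <= entries.length - m; k++) {
                marginSum += Rect.enclose(sorted, 0, k).margin()
                        + Rect.enclose(sorted, k, sorted.length).margin();
            }
            if (marginSum < minMarginSum) {
                minMarginSum = marginSum;
                bestAxis = axis;
            }
        }
        // 2. Split index on that axis: minimum overlap, ties broken by minimum
        // combined area (mirroring the second loop of split() above).
        final int a = bestAxis;
        Arrays.sort(entries, Comparator.comparingDouble((Rect r) -> r.lo[a]));
        int bestK = m;
        double minOverlap = Double.MAX_VALUE, minArea = Double.MAX_VALUE;
        for (int k = m; k <= entries.length - m; k++) {
            Rect left = Rect.enclose(entries, 0, k), right = Rect.enclose(entries, k, entries.length);
            double overlap = left.overlap(right), area = left.area() + right.area();
            if (overlap < minOverlap || (overlap == minOverlap && area < minArea)) {
                bestK = k;
                minOverlap = overlap;
                minArea = area;
            }
        }
        return bestK;
    }
}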
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
index 51a047e..d12c0b1 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
@@ -21,23 +21,25 @@
public class RTreeNSMLeafFrameFactory implements ITreeIndexFrameFactory {
- private static final long serialVersionUID = 1L;
- private ITreeIndexTupleWriterFactory tupleWriterFactory;
- private int keyFieldCount;
+ private static final long serialVersionUID = 1L;
+ private final ITreeIndexTupleWriterFactory tupleWriterFactory;
+ private final int keyFieldCount;
- public RTreeNSMLeafFrameFactory(
- ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
- this.tupleWriterFactory = tupleWriterFactory;
- if (keyFieldCount % 2 != 0) {
- throw new IllegalArgumentException(
- "The key has different number of dimensions.");
- }
- this.keyFieldCount = keyFieldCount;
- }
+ public RTreeNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
+ this.tupleWriterFactory = tupleWriterFactory;
+ if (keyFieldCount % 2 != 0) {
+ throw new IllegalArgumentException("The low and high key points have different numbers of dimensions (key field count must be even).");
+ }
+ this.keyFieldCount = keyFieldCount;
+ }
+
+ @Override
+ public IRTreeLeafFrame createFrame() {
+ return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(), keyFieldCount);
+ }
@Override
- public IRTreeLeafFrame createFrame() {
- return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(),
- keyFieldCount);
+ public ITreeIndexTupleWriterFactory getTupleWriterFactory() {
+ return tupleWriterFactory;
}
-}
\ No newline at end of file
+}
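
Usage note for the factory just shown: keyFieldCount is the total number of key fields and must be even, since every dimension contributes one low and one high coordinate. The minimal wiring sketch below is an illustration, not code from this patch; only the factory's package comes from the file header above, while the two interface import paths and the helper class are assumptions.

// Hypothetical wiring sketch; interface import paths are assumed and may differ per branch.
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;

public final class LeafFrameWiringSketch {
    public static IRTreeLeafFrame newLeafFrame(ITreeIndexTupleWriterFactory tupleWriterFactory, int dimensions) {
        // Each dimension contributes a low and a high key field, so the count must be even;
        // an odd count makes the factory constructor throw IllegalArgumentException.
        int keyFieldCount = 2 * dimensions;
        RTreeNSMLeafFrameFactory factory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
        return factory.createFrame();
    }
}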
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
index 03b2062..97eefe0 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
@@ -47,972 +47,901 @@
public class RTree implements ITreeIndex {
- private boolean created = false;
- private boolean loaded = false;
- private final int rootPage = 1; // the root page never changes
+ private boolean created = false;
+ private boolean loaded = false;
+ private final int rootPage = 1; // the root page never changes
- private final AtomicInteger globalNsn; // Global node sequence number
- private int numOfPages = 1;
- private final ReadWriteLock treeLatch;
+ private final AtomicInteger globalNsn; // Global node sequence number
+ private int numOfPages = 1;
+ private final ReadWriteLock treeLatch;
- private final IFreePageManager freePageManager;
- private final IBufferCache bufferCache;
- private int fileId;
+ private final IFreePageManager freePageManager;
+ private final IBufferCache bufferCache;
+ private int fileId;
- private final SearchPredicate diskOrderScanPredicate;
- private final ITreeIndexFrameFactory interiorFrameFactory;
- private final ITreeIndexFrameFactory leafFrameFactory;
- private final MultiComparator cmp;
+ private final SearchPredicate diskOrderScanPredicate;
+ private final ITreeIndexFrameFactory interiorFrameFactory;
+ private final ITreeIndexFrameFactory leafFrameFactory;
+ private final MultiComparator cmp;
- public int rootSplits = 0;
- public int[] splitsByLevel = new int[500];
- public AtomicLong readLatchesAcquired = new AtomicLong();
- public AtomicLong readLatchesReleased = new AtomicLong();
- public AtomicLong writeLatchesAcquired = new AtomicLong();
- public AtomicLong writeLatchesReleased = new AtomicLong();
- public AtomicLong pins = new AtomicLong();
- public AtomicLong unpins = new AtomicLong();
- public byte currentLevel = 0;
+ public int rootSplits = 0;
+ public int[] splitsByLevel = new int[500];
+ public AtomicLong readLatchesAcquired = new AtomicLong();
+ public AtomicLong readLatchesReleased = new AtomicLong();
+ public AtomicLong writeLatchesAcquired = new AtomicLong();
+ public AtomicLong writeLatchesReleased = new AtomicLong();
+ public AtomicLong pins = new AtomicLong();
+ public AtomicLong unpins = new AtomicLong();
+ public byte currentLevel = 0;
- public RTree(IBufferCache bufferCache, IFreePageManager freePageManager,
- ITreeIndexFrameFactory interiorFrameFactory,
- ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp) {
- this.bufferCache = bufferCache;
- this.freePageManager = freePageManager;
- this.interiorFrameFactory = interiorFrameFactory;
- this.leafFrameFactory = leafFrameFactory;
- this.cmp = cmp;
- globalNsn = new AtomicInteger();
- this.treeLatch = new ReentrantReadWriteLock(true);
- this.diskOrderScanPredicate = new SearchPredicate(null, cmp);
- }
+ public RTree(IBufferCache bufferCache, IFreePageManager freePageManager,
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp) {
+ this.bufferCache = bufferCache;
+ this.freePageManager = freePageManager;
+ this.interiorFrameFactory = interiorFrameFactory;
+ this.leafFrameFactory = leafFrameFactory;
+ this.cmp = cmp;
+ globalNsn = new AtomicInteger();
+ this.treeLatch = new ReentrantReadWriteLock(true);
+ this.diskOrderScanPredicate = new SearchPredicate(null, cmp);
+ }
- public void incrementGlobalNsn() {
- globalNsn.incrementAndGet();
- }
+ public void incrementGlobalNsn() {
+ globalNsn.incrementAndGet();
+ }
- public int getGlobalNsn() {
- return globalNsn.get();
- }
+ public int getGlobalNsn() {
+ return globalNsn.get();
+ }
- public void incrementReadLatchesAcquired() {
- readLatchesAcquired.incrementAndGet();
- }
+ public void incrementReadLatchesAcquired() {
+ readLatchesAcquired.incrementAndGet();
+ }
- public void incrementReadLatchesReleased() {
- readLatchesReleased.incrementAndGet();
- }
+ public void incrementReadLatchesReleased() {
+ readLatchesReleased.incrementAndGet();
+ }
- public void incrementWriteLatchesAcquired() {
- writeLatchesAcquired.incrementAndGet();
- }
+ public void incrementWriteLatchesAcquired() {
+ writeLatchesAcquired.incrementAndGet();
+ }
- public void incrementWriteLatchesReleased() {
- writeLatchesReleased.incrementAndGet();
- }
+ public void incrementWriteLatchesReleased() {
+ writeLatchesReleased.incrementAndGet();
+ }
- public void incrementPins() {
- pins.incrementAndGet();
- }
+ public void incrementPins() {
+ pins.incrementAndGet();
+ }
- public void incrementUnpins() {
- unpins.incrementAndGet();
- }
+ public void incrementUnpins() {
+ unpins.incrementAndGet();
+ }
- public String printStats() {
- StringBuilder strBuilder = new StringBuilder();
- strBuilder.append("\n");
- strBuilder.append("ROOTSPLITS: " + rootSplits + "\n");
- strBuilder.append("SPLITS BY LEVEL\n");
- for (int i = 0; i < currentLevel; i++) {
- strBuilder.append(String.format("%3d ", i)
- + String.format("%8d ", splitsByLevel[i]) + "\n");
- }
- strBuilder.append(String.format("READ LATCHES: %10d %10d\n",
- readLatchesAcquired.get(), readLatchesReleased.get()));
- strBuilder.append(String.format("WRITE LATCHES: %10d %10d\n",
- writeLatchesAcquired.get(), writeLatchesReleased.get()));
- strBuilder.append(String.format("PINS: %10d %10d\n",
- pins.get(), unpins.get()));
+ public String printStats() {
+ StringBuilder strBuilder = new StringBuilder();
+ strBuilder.append("\n");
+ strBuilder.append("ROOTSPLITS: " + rootSplits + "\n");
+ strBuilder.append("SPLITS BY LEVEL\n");
+ for (int i = 0; i < currentLevel; i++) {
+ strBuilder.append(String.format("%3d ", i) + String.format("%8d ", splitsByLevel[i]) + "\n");
+ }
+ strBuilder.append(String.format("READ LATCHES: %10d %10d\n", readLatchesAcquired.get(),
+ readLatchesReleased.get()));
+ strBuilder.append(String.format("WRITE LATCHES: %10d %10d\n", writeLatchesAcquired.get(),
+ writeLatchesReleased.get()));
+ strBuilder.append(String.format("PINS: %10d %10d\n", pins.get(), unpins.get()));
- strBuilder.append(String.format("Num of Pages: %10d\n",
- numOfPages));
+ strBuilder.append(String.format("Num of Pages: %10d\n", numOfPages));
- return strBuilder.toString();
- }
+ return strBuilder.toString();
+ }
- public void printTree(IRTreeFrame leafFrame, IRTreeFrame interiorFrame,
- ISerializerDeserializer[] fields) throws Exception {
- printTree(rootPage, null, false, leafFrame, interiorFrame, fields);
- }
+ public void printTree(IRTreeFrame leafFrame, IRTreeFrame interiorFrame, ISerializerDeserializer[] fields)
+ throws Exception {
+ printTree(rootPage, null, false, leafFrame, interiorFrame, fields);
+ }
- public void printTree(int pageId, ICachedPage parent, boolean unpin,
- IRTreeFrame leafFrame, IRTreeFrame interiorFrame,
- ISerializerDeserializer[] fields) throws Exception {
+ public void printTree(int pageId, ICachedPage parent, boolean unpin, IRTreeFrame leafFrame,
+ IRTreeFrame interiorFrame, ISerializerDeserializer[] fields) throws Exception {
- ICachedPage node = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, pageId), false);
- incrementPins();
- node.acquireReadLatch();
- incrementReadLatchesAcquired();
+ ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ node.acquireReadLatch();
+ incrementReadLatchesAcquired();
- try {
- if (parent != null && unpin == true) {
- parent.releaseReadLatch();
- incrementReadLatchesReleased();
- bufferCache.unpin(parent);
- incrementUnpins();
- }
+ try {
+ if (parent != null && unpin == true) {
+ parent.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(parent);
+ incrementUnpins();
+ }
- interiorFrame.setPage(node);
- int level = interiorFrame.getLevel();
+ interiorFrame.setPage(node);
+ int level = interiorFrame.getLevel();
- System.out.format("%1d ", level);
- System.out.format("%3d ", pageId);
- for (int i = 0; i < currentLevel - level; i++)
- System.out.format(" ");
+ System.out.format("%1d ", level);
+ System.out.format("%3d ", pageId);
+ for (int i = 0; i < currentLevel - level; i++)
+ System.out.format(" ");
- String keyString;
- if (interiorFrame.isLeaf()) {
- leafFrame.setPage(node);
- keyString = leafFrame.printKeys(cmp, fields);
- } else {
- keyString = interiorFrame.printKeys(cmp, fields);
- }
+ String keyString;
+ if (interiorFrame.isLeaf()) {
+ leafFrame.setPage(node);
+ keyString = leafFrame.printKeys(cmp, fields);
+ } else {
+ keyString = interiorFrame.printKeys(cmp, fields);
+ }
- System.out.format(keyString);
- if (!interiorFrame.isLeaf()) {
- ArrayList<Integer> children = ((RTreeNSMFrame) (interiorFrame))
- .getChildren(cmp);
- for (int i = 0; i < children.size(); i++) {
- printTree(children.get(i), node, i == children.size() - 1,
- leafFrame, interiorFrame, fields);
- }
- } else {
- node.releaseReadLatch();
- incrementReadLatchesReleased();
- bufferCache.unpin(node);
- incrementUnpins();
- }
- } catch (Exception e) {
- node.releaseReadLatch();
- incrementReadLatchesReleased();
- bufferCache.unpin(node);
- incrementUnpins();
- throw e;
- }
- }
+ System.out.format(keyString);
+ if (!interiorFrame.isLeaf()) {
+ ArrayList<Integer> children = ((RTreeNSMFrame) (interiorFrame)).getChildren(cmp);
+ for (int i = 0; i < children.size(); i++) {
+ printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, fields);
+ }
+ } else {
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ }
+ } catch (Exception e) {
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ throw e;
+ }
+ }
- @Override
- public void create(int fileId, ITreeIndexFrame leafFrame,
- ITreeIndexMetaDataFrame metaFrame) throws Exception {
- if (created)
- return;
+ @Override
+ public void create(int fileId, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame) throws Exception {
+ if (created)
+ return;
- treeLatch.writeLock().lock();
- try {
- // check if another thread beat us to it
- if (created)
- return;
+ treeLatch.writeLock().lock();
+ try {
+ // check if another thread beat us to it
+ if (created)
+ return;
- freePageManager.init(metaFrame, rootPage);
+ freePageManager.init(metaFrame, rootPage);
- // initialize root page
- ICachedPage rootNode = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
- incrementPins();
+ // initialize root page
+ ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
+ incrementPins();
- rootNode.acquireWriteLatch();
- incrementWriteLatchesAcquired();
- try {
- leafFrame.setPage(rootNode);
- leafFrame.initBuffer((byte) 0);
- } finally {
- rootNode.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(rootNode);
- incrementUnpins();
- }
- currentLevel = 0;
+ rootNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ try {
+ leafFrame.setPage(rootNode);
+ leafFrame.initBuffer((byte) 0);
+ } finally {
+ rootNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(rootNode);
+ incrementUnpins();
+ }
+ currentLevel = 0;
- created = true;
- } finally {
- treeLatch.writeLock().unlock();
- }
- }
+ created = true;
+ } finally {
+ treeLatch.writeLock().unlock();
+ }
+ }
- public void open(int fileId) {
- this.fileId = fileId;
- }
+ public void open(int fileId) {
+ this.fileId = fileId;
+ }
- public void close() {
- fileId = -1;
- }
+ public void close() {
+ fileId = -1;
+ }
- @Override
- public RTreeOpContext createOpContext(IndexOp op,
- ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
- ITreeIndexMetaDataFrame metaFrame) {
- return new RTreeOpContext(op, (IRTreeLeafFrame) leafFrame,
- (IRTreeInteriorFrame) interiorFrame, metaFrame, 8);
- }
+ @Override
+ public RTreeOpContext createOpContext(IndexOp op, ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame) {
+ return new RTreeOpContext(op, (IRTreeLeafFrame) leafFrame, (IRTreeInteriorFrame) interiorFrame, metaFrame, 8);
+ }
- @Override
- public void insert(ITupleReference tuple, IndexOpContext ictx)
- throws Exception {
- RTreeOpContext ctx = (RTreeOpContext) ictx;
- ctx.reset();
- ctx.setTuple(tuple);
- ctx.splitKey.reset();
- ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
- ctx.splitKey.getRightTuple().setFieldCount(cmp.getKeyFieldCount());
- ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
- ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
+ @Override
+ public void insert(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ RTreeOpContext ctx = (RTreeOpContext) ictx;
+ ctx.reset();
+ ctx.setTuple(tuple);
+ ctx.splitKey.reset();
+ ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
+ ctx.splitKey.getRightTuple().setFieldCount(cmp.getKeyFieldCount());
+ ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+ ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
- int maxFieldPos = cmp.getKeyFieldCount() / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- int c = cmp.getComparators()[i].compare(tuple.getFieldData(i),
- tuple.getFieldStart(i), tuple.getFieldLength(i),
- tuple.getFieldData(j), tuple.getFieldStart(j),
- tuple.getFieldLength(j));
- if (c > 0) {
- throw new IllegalArgumentException(
- "The low key point has larger coordinates than the high key point.");
- }
- }
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ if (c > 0) {
+ throw new IllegalArgumentException("The low key point has larger coordinates than the high key point.");
+ }
+ }
- ICachedPage leafNode = findLeaf(ctx);
+ ICachedPage leafNode = findLeaf(ctx);
- int pageId = ctx.pathList.getLastPageId();
- ctx.pathList.moveLast();
- insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
+ int pageId = ctx.pathList.getLastPageId();
+ ctx.pathList.moveLast();
+ insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
- while (true) {
- if (ctx.splitKey.getLeftPageBuffer() != null) {
- updateParentForInsert(ctx);
- } else {
- break;
- }
- }
+ while (true) {
+ if (ctx.splitKey.getLeftPageBuffer() != null) {
+ updateParentForInsert(ctx);
+ } else {
+ break;
+ }
+ }
- leafNode.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(leafNode);
- incrementUnpins();
- }
+ leafNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(leafNode);
+ incrementUnpins();
+ }
- public ICachedPage findLeaf(RTreeOpContext ctx) throws Exception {
- int pageId = rootPage;
- boolean writeLatched = false;
- ICachedPage node = null;
- boolean isLeaf = false;
- int pageLsn = 0, parentLsn = 0;
+ public ICachedPage findLeaf(RTreeOpContext ctx) throws Exception {
+ int pageId = rootPage;
+ boolean writeLatched = false;
+ ICachedPage node = null;
+ boolean isLeaf = false;
+ int pageLsn = 0, parentLsn = 0;
- while (true) {
- if (!writeLatched) {
- node = bufferCache
- .pin(BufferedFileHandle.getDiskPageId(fileId, pageId),
- false);
- incrementPins();
- ctx.interiorFrame.setPage(node);
- isLeaf = ctx.interiorFrame.isLeaf();
- if (isLeaf) {
- node.acquireWriteLatch();
- incrementWriteLatchesAcquired();
- writeLatched = true;
+ while (true) {
+ if (!writeLatched) {
+ node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ ctx.interiorFrame.setPage(node);
+ isLeaf = ctx.interiorFrame.isLeaf();
+ if (isLeaf) {
+ node.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ writeLatched = true;
- if (!ctx.interiorFrame.isLeaf()) {
- node.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(node);
- incrementUnpins();
- writeLatched = false;
- continue;
- }
- } else {
- // Be optimistic and grab read latch first. We will swap it
- // to write latch if we need to enlarge the best child
- // tuple.
- node.acquireReadLatch();
- incrementReadLatchesAcquired();
- }
- }
+ if (!ctx.interiorFrame.isLeaf()) {
+ node.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ writeLatched = false;
+ continue;
+ }
+ } else {
+ // Be optimistic and grab read latch first. We will swap it
+ // to write latch if we need to enlarge the best child
+ // tuple.
+ node.acquireReadLatch();
+ incrementReadLatchesAcquired();
+ }
+ }
- if (pageId != rootPage
- && parentLsn < ctx.interiorFrame.getPageNsn()) {
- // Concurrent split detected, go back to parent and re-choose
- // the best child
- if (writeLatched) {
- node.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(node);
- incrementUnpins();
- writeLatched = false;
- } else {
- node.releaseReadLatch();
- incrementReadLatchesReleased();
- bufferCache.unpin(node);
- incrementUnpins();
- }
+ if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+ // Concurrent split detected, go back to parent and re-choose
+ // the best child
+ if (writeLatched) {
+ node.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ writeLatched = false;
+ } else {
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ }
- pageId = ctx.pathList.getLastPageId();
- if (pageId != rootPage) {
- parentLsn = ctx.pathList
- .getPageLsn(ctx.pathList.size() - 2);
- }
- ctx.pathList.moveLast();
- continue;
- }
+ pageId = ctx.pathList.getLastPageId();
+ if (pageId != rootPage) {
+ parentLsn = ctx.pathList.getPageLsn(ctx.pathList.size() - 2);
+ }
+ ctx.pathList.moveLast();
+ continue;
+ }
- pageLsn = ctx.interiorFrame.getPageLsn();
- ctx.pathList.add(pageId, pageLsn, -1);
+ pageLsn = ctx.interiorFrame.getPageLsn();
+ ctx.pathList.add(pageId, pageLsn, -1);
- if (!isLeaf) {
- // findBestChild must be called *before* getBestChildPageId
- ctx.interiorFrame.findBestChild(ctx.getTuple(), cmp);
- int childPageId = ctx.interiorFrame.getBestChildPageId(cmp);
+ if (!isLeaf) {
+ // findBestChild must be called *before* getBestChildPageId
+ ctx.interiorFrame.findBestChild(ctx.getTuple(), cmp);
+ int childPageId = ctx.interiorFrame.getBestChildPageId(cmp);
- if (!writeLatched) {
- node.releaseReadLatch();
- incrementReadLatchesReleased();
- // TODO: do we need to un-pin and pin again?
- bufferCache.unpin(node);
- incrementUnpins();
+ if (!writeLatched) {
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ // TODO: do we need to un-pin and pin again?
+ bufferCache.unpin(node);
+ incrementUnpins();
- node = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, pageId),
- false);
- incrementPins();
- node.acquireWriteLatch();
- incrementWriteLatchesAcquired();
- ctx.interiorFrame.setPage(node);
- writeLatched = true;
+ node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ node.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.interiorFrame.setPage(node);
+ writeLatched = true;
- if (ctx.interiorFrame.getPageLsn() != pageLsn) {
- // The page was changed while we unlocked it; thus,
- // retry (re-choose best child)
+ if (ctx.interiorFrame.getPageLsn() != pageLsn) {
+ // The page was changed while we unlocked it; thus,
+ // retry (re-choose best child)
- ctx.pathList.moveLast();
- continue;
- }
- }
+ ctx.pathList.moveLast();
+ continue;
+ }
+ }
- // We don't need to reset the frameTuple because it is
- // already pointing to the best child
- ctx.interiorFrame.enlarge(ctx.getTuple(), cmp);
+ // We don't need to reset the frameTuple because it is
+ // already pointing to the best child
+ ctx.interiorFrame.enlarge(ctx.getTuple(), cmp);
- node.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(node);
- incrementUnpins();
- writeLatched = false;
+ node.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ writeLatched = false;
- pageId = childPageId;
- parentLsn = pageLsn;
- } else {
- ctx.leafFrame.setPage(node);
- return node;
- }
- }
- }
+ pageId = childPageId;
+ parentLsn = pageLsn;
+ } else {
+ ctx.leafFrame.setPage(node);
+ return node;
+ }
+ }
+ }
- private void insertTuple(ICachedPage node, int pageId,
- ITupleReference tuple, RTreeOpContext ctx, boolean isLeaf)
- throws Exception {
- FrameOpSpaceStatus spaceStatus;
- if (!isLeaf) {
- spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
- } else {
- spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
- }
+ private void insertTuple(ICachedPage node, int pageId, ITupleReference tuple, RTreeOpContext ctx, boolean isLeaf)
+ throws Exception {
+ FrameOpSpaceStatus spaceStatus;
+ if (!isLeaf) {
+ spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
+ } else {
+ spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
+ }
- switch (spaceStatus) {
- case SUFFICIENT_CONTIGUOUS_SPACE: {
- if (!isLeaf) {
- ctx.interiorFrame.insert(tuple, cmp, -1);
- incrementGlobalNsn();
- ctx.interiorFrame.setPageLsn(getGlobalNsn());
- } else {
- ctx.leafFrame.insert(tuple, cmp, -1);
- incrementGlobalNsn();
- ctx.leafFrame.setPageLsn(getGlobalNsn());
- }
- ctx.splitKey.reset();
- break;
- }
+ switch (spaceStatus) {
+ case SUFFICIENT_CONTIGUOUS_SPACE: {
+ if (!isLeaf) {
+ ctx.interiorFrame.insert(tuple, cmp, -1);
+ incrementGlobalNsn();
+ ctx.interiorFrame.setPageLsn(getGlobalNsn());
+ } else {
+ ctx.leafFrame.insert(tuple, cmp, -1);
+ incrementGlobalNsn();
+ ctx.leafFrame.setPageLsn(getGlobalNsn());
+ }
+ ctx.splitKey.reset();
+ break;
+ }
- case SUFFICIENT_SPACE: {
- if (!isLeaf) {
- ctx.interiorFrame.compact(cmp);
- ctx.interiorFrame.insert(tuple, cmp, -1);
- incrementGlobalNsn();
- ctx.interiorFrame.setPageLsn(getGlobalNsn());
- } else {
- ctx.leafFrame.compact(cmp);
- ctx.leafFrame.insert(tuple, cmp, -1);
- incrementGlobalNsn();
- ctx.leafFrame.setPageLsn(getGlobalNsn());
- }
- ctx.splitKey.reset();
- break;
- }
+ case SUFFICIENT_SPACE: {
+ if (!isLeaf) {
+ ctx.interiorFrame.compact(cmp);
+ ctx.interiorFrame.insert(tuple, cmp, -1);
+ incrementGlobalNsn();
+ ctx.interiorFrame.setPageLsn(getGlobalNsn());
+ } else {
+ ctx.leafFrame.compact(cmp);
+ ctx.leafFrame.insert(tuple, cmp, -1);
+ incrementGlobalNsn();
+ ctx.leafFrame.setPageLsn(getGlobalNsn());
+ }
+ ctx.splitKey.reset();
+ break;
+ }
- case INSUFFICIENT_SPACE: {
- int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
- ICachedPage rightNode = bufferCache
- .pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId),
- true);
- incrementPins();
- rightNode.acquireWriteLatch();
- incrementWriteLatchesAcquired();
+ case INSUFFICIENT_SPACE: {
+ int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
+ ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
+ incrementPins();
+ rightNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
- try {
- IRTreeFrame rightFrame;
- int ret;
- numOfPages++; // debug
- if (!isLeaf) {
- splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
- rightFrame = (IRTreeFrame) interiorFrameFactory
- .createFrame();
- rightFrame.setPage(rightNode);
- rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
- rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
- ret = ctx.interiorFrame.split(rightFrame, tuple, cmp,
- ctx.splitKey);
- ctx.interiorFrame.setRightPage(rightPageId);
- rightFrame.setPageNsn(ctx.interiorFrame.getPageNsn());
- incrementGlobalNsn();
- int newNsn = getGlobalNsn();
- rightFrame.setPageLsn(newNsn);
- ctx.interiorFrame.setPageNsn(newNsn);
- ctx.interiorFrame.setPageLsn(newNsn);
- } else {
- splitsByLevel[0]++; // debug
- rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
- rightFrame.setPage(rightNode);
- rightFrame.initBuffer((byte) 0);
- rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
- ret = ctx.leafFrame.split(rightFrame, tuple, cmp,
- ctx.splitKey);
- ctx.leafFrame.setRightPage(rightPageId);
- rightFrame.setPageNsn(ctx.leafFrame.getPageNsn());
- incrementGlobalNsn();
- int newNsn = getGlobalNsn();
- rightFrame.setPageLsn(newNsn);
- ctx.leafFrame.setPageNsn(newNsn);
- ctx.leafFrame.setPageLsn(newNsn);
- }
- if (ret != 0) {
- ctx.splitKey.reset();
- } else {
- ctx.splitKey.setPages(pageId, rightPageId);
- }
- if (pageId == rootPage) {
- rootSplits++; // debug
- splitsByLevel[currentLevel]++;
- currentLevel++;
+ try {
+ IRTreeFrame rightFrame;
+ int ret;
+ numOfPages++; // debug
+ if (!isLeaf) {
+ splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
+ rightFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
+ rightFrame.setPage(rightNode);
+ rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
+ rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+ ret = ctx.interiorFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
+ ctx.interiorFrame.setRightPage(rightPageId);
+ rightFrame.setPageNsn(ctx.interiorFrame.getPageNsn());
+ incrementGlobalNsn();
+ int newNsn = getGlobalNsn();
+ rightFrame.setPageLsn(newNsn);
+ ctx.interiorFrame.setPageNsn(newNsn);
+ ctx.interiorFrame.setPageLsn(newNsn);
+ } else {
+ splitsByLevel[0]++; // debug
+ rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+ rightFrame.setPage(rightNode);
+ rightFrame.initBuffer((byte) 0);
+ rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
+ ret = ctx.leafFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
+ ctx.leafFrame.setRightPage(rightPageId);
+ rightFrame.setPageNsn(ctx.leafFrame.getPageNsn());
+ incrementGlobalNsn();
+ int newNsn = getGlobalNsn();
+ rightFrame.setPageLsn(newNsn);
+ ctx.leafFrame.setPageNsn(newNsn);
+ ctx.leafFrame.setPageLsn(newNsn);
+ }
+ if (ret != 0) {
+ ctx.splitKey.reset();
+ } else {
+ ctx.splitKey.setPages(pageId, rightPageId);
+ }
+ if (pageId == rootPage) {
+ rootSplits++; // debug
+ splitsByLevel[currentLevel]++;
+ currentLevel++;
- int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
- ICachedPage newLeftNode = bufferCache
- .pin(BufferedFileHandle.getDiskPageId(fileId,
- newLeftId), true);
- incrementPins();
- newLeftNode.acquireWriteLatch();
- incrementWriteLatchesAcquired();
- try {
- // copy left child to new left child
- System.arraycopy(node.getBuffer().array(), 0,
- newLeftNode.getBuffer().array(), 0, newLeftNode
- .getBuffer().capacity());
+ int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
+ ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId),
+ true);
+ incrementPins();
+ newLeftNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ try {
+ // copy left child to new left child
+ System.arraycopy(node.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0,
+ newLeftNode.getBuffer().capacity());
- // initialize new root (leftNode becomes new root)
- ctx.interiorFrame.setPage(node);
- ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame
- .getLevel() + 1));
+ // initialize new root (leftNode becomes new root)
+ ctx.interiorFrame.setPage(node);
+ ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame.getLevel() + 1));
- ctx.splitKey.setLeftPage(newLeftId);
+ ctx.splitKey.setLeftPage(newLeftId);
- ctx.interiorFrame.insert(ctx.splitKey.getLeftTuple(),
- cmp, -1);
- ctx.interiorFrame.insert(ctx.splitKey.getRightTuple(),
- cmp, -1);
+ ctx.interiorFrame.insert(ctx.splitKey.getLeftTuple(), cmp, -1);
+ ctx.interiorFrame.insert(ctx.splitKey.getRightTuple(), cmp, -1);
- incrementGlobalNsn();
- int newNsn = getGlobalNsn();
- ctx.interiorFrame.setPageLsn(newNsn);
- ctx.interiorFrame.setPageNsn(newNsn);
- } finally {
- newLeftNode.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(newLeftNode);
- incrementUnpins();
- }
+ incrementGlobalNsn();
+ int newNsn = getGlobalNsn();
+ ctx.interiorFrame.setPageLsn(newNsn);
+ ctx.interiorFrame.setPageNsn(newNsn);
+ } finally {
+ newLeftNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(newLeftNode);
+ incrementUnpins();
+ }
- ctx.splitKey.reset();
- }
- } finally {
- rightNode.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(rightNode);
- incrementUnpins();
- }
- break;
- }
- }
- }
+ ctx.splitKey.reset();
+ }
+ } finally {
+ rightNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(rightNode);
+ incrementUnpins();
+ }
+ break;
+ }
+ }
+ }
- public void updateParentForInsert(RTreeOpContext ctx) throws Exception {
- int parentId = ctx.pathList.getLastPageId();
- ICachedPage parentNode = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, parentId), false);
- incrementPins();
- parentNode.acquireWriteLatch();
- incrementWriteLatchesAcquired();
- ctx.interiorFrame.setPage(parentNode);
- boolean foundParent = true;
+ public void updateParentForInsert(RTreeOpContext ctx) throws Exception {
+ int parentId = ctx.pathList.getLastPageId();
+ ICachedPage parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+ incrementPins();
+ parentNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.interiorFrame.setPage(parentNode);
+ boolean foundParent = true;
- if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
- foundParent = false;
- while (true) {
- if (ctx.interiorFrame.findTupleByPointer(
- ctx.splitKey.getLeftTuple(), cmp) != -1) {
- // found the parent
- foundParent = true;
- break;
- }
- int rightPage = ctx.interiorFrame.getRightPage();
- parentNode.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(parentNode);
- incrementUnpins();
+ if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
+ foundParent = false;
+ while (true) {
+ if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp) != -1) {
+ // found the parent
+ foundParent = true;
+ break;
+ }
+ int rightPage = ctx.interiorFrame.getRightPage();
+ parentNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(parentNode);
+ incrementUnpins();
- if (rightPage == -1) {
- break;
- }
+ if (rightPage == -1) {
+ break;
+ }
- parentId = rightPage;
- parentNode = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, parentId),
- false);
- incrementPins();
- parentNode.acquireWriteLatch();
- incrementWriteLatchesAcquired();
- ctx.interiorFrame.setPage(parentNode);
- }
- }
- if (foundParent) {
- ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, cmp);
- insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(),
- ctx, ctx.interiorFrame.isLeaf());
- ctx.pathList.moveLast();
+ parentId = rightPage;
+ parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+ incrementPins();
+ parentNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.interiorFrame.setPage(parentNode);
+ }
+ }
+ if (foundParent) {
+ ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, cmp);
+ insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(), ctx, ctx.interiorFrame.isLeaf());
+ ctx.pathList.moveLast();
- parentNode.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(parentNode);
- incrementUnpins();
- return;
- }
+ parentNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(parentNode);
+ incrementUnpins();
+ return;
+ }
- // very rare situation when the there is a root split, do an exhaustive
- // breadth-first traversal looking for the parent tuple
+ // very rare situation when there is a root split, do an exhaustive
+ // breadth-first traversal looking for the parent tuple
- ctx.pathList.clear();
- ctx.traverseList.clear();
- findPath(ctx);
- updateParentForInsert(ctx);
- }
+ ctx.pathList.clear();
+ ctx.traverseList.clear();
+ findPath(ctx);
+ updateParentForInsert(ctx);
+ }
- public void findPath(RTreeOpContext ctx) throws Exception {
- int pageId = rootPage;
- int parentIndex = -1;
- int parentLsn = 0;
- int pageLsn, pageIndex;
- ctx.traverseList.add(pageId, -1, parentIndex);
- while (!ctx.traverseList.isLast()) {
- pageId = ctx.traverseList.getFirstPageId();
- parentIndex = ctx.traverseList.getFirstPageIndex();
+ public void findPath(RTreeOpContext ctx) throws Exception {
+ int pageId = rootPage;
+ int parentIndex = -1;
+ int parentLsn = 0;
+ int pageLsn, pageIndex;
+ ctx.traverseList.add(pageId, -1, parentIndex);
+ while (!ctx.traverseList.isLast()) {
+ pageId = ctx.traverseList.getFirstPageId();
+ parentIndex = ctx.traverseList.getFirstPageIndex();
- ICachedPage node = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, pageId), false);
- incrementPins();
- node.acquireReadLatch();
- incrementReadLatchesAcquired();
- ctx.interiorFrame.setPage(node);
- pageLsn = ctx.interiorFrame.getPageLsn();
- pageIndex = ctx.traverseList.first();
- ctx.traverseList.setPageLsn(pageIndex, pageLsn);
+ ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ node.acquireReadLatch();
+ incrementReadLatchesAcquired();
+ ctx.interiorFrame.setPage(node);
+ pageLsn = ctx.interiorFrame.getPageLsn();
+ pageIndex = ctx.traverseList.first();
+ ctx.traverseList.setPageLsn(pageIndex, pageLsn);
- ctx.traverseList.moveFirst();
+ ctx.traverseList.moveFirst();
- if (pageId != rootPage
- && parentLsn < ctx.interiorFrame.getPageNsn()) {
- int rightPage = ctx.interiorFrame.getRightPage();
- if (rightPage != -1) {
- ctx.traverseList.add(rightPage, -1, parentIndex);
- }
- }
- parentLsn = pageLsn;
+ if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+ int rightPage = ctx.interiorFrame.getRightPage();
+ if (rightPage != -1) {
+ ctx.traverseList.add(rightPage, -1, parentIndex);
+ }
+ }
+ parentLsn = pageLsn;
- if (ctx.interiorFrame.findTupleByPointer(
- ctx.splitKey.getLeftTuple(), ctx.traverseList, pageIndex,
- cmp) != -1) {
- fillPath(ctx, pageIndex);
+ if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.traverseList, pageIndex, cmp) != -1) {
+ fillPath(ctx, pageIndex);
- node.releaseReadLatch();
- incrementReadLatchesReleased();
- bufferCache.unpin(node);
- incrementUnpins();
- return;
- }
- node.releaseReadLatch();
- incrementReadLatchesReleased();
- bufferCache.unpin(node);
- incrementUnpins();
- }
- }
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ return;
+ }
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ }
+ }
- public void fillPath(RTreeOpContext ctx, int pageIndex) throws Exception {
- if (pageIndex != -1) {
- fillPath(ctx, ctx.traverseList.getPageIndex(pageIndex));
- ctx.pathList.add(ctx.traverseList.getPageId(pageIndex),
- ctx.traverseList.getPageLsn(pageIndex), -1);
- }
- }
+ public void fillPath(RTreeOpContext ctx, int pageIndex) throws Exception {
+ if (pageIndex != -1) {
+ fillPath(ctx, ctx.traverseList.getPageIndex(pageIndex));
+ ctx.pathList.add(ctx.traverseList.getPageId(pageIndex), ctx.traverseList.getPageLsn(pageIndex), -1);
+ }
+ }
- @Override
- public void delete(ITupleReference tuple, IndexOpContext ictx)
- throws Exception {
- RTreeOpContext ctx = (RTreeOpContext) ictx;
- ctx.reset();
- ctx.setTuple(tuple);
- ctx.splitKey.reset();
- ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
- ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
- ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
+ @Override
+ public void delete(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ RTreeOpContext ctx = (RTreeOpContext) ictx;
+ ctx.reset();
+ ctx.setTuple(tuple);
+ ctx.splitKey.reset();
+ ctx.splitKey.getLeftTuple().setFieldCount(cmp.getKeyFieldCount());
+ ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+ ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
- int tupleIndex = findTupleToDelete(ctx);
+ int tupleIndex = findTupleToDelete(ctx);
- if (tupleIndex != -1) {
- int pageId = ctx.pathList.getLastPageId();
- ctx.pathList.moveLast();
- deleteTuple(pageId, tupleIndex, ctx);
+ if (tupleIndex != -1) {
+ int pageId = ctx.pathList.getLastPageId();
+ ctx.pathList.moveLast();
+ deleteTuple(pageId, tupleIndex, ctx);
- while (true) {
- if (ctx.splitKey.getLeftPageBuffer() != null) {
- updateParentForDelete(ctx);
- } else {
- break;
- }
- }
+ while (true) {
+ if (ctx.splitKey.getLeftPageBuffer() != null) {
+ updateParentForDelete(ctx);
+ } else {
+ break;
+ }
+ }
- ctx.leafFrame.getPage().releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(ctx.leafFrame.getPage());
- incrementUnpins();
- }
- }
+ ctx.leafFrame.getPage().releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(ctx.leafFrame.getPage());
+ incrementUnpins();
+ }
+ }
- public void updateParentForDelete(RTreeOpContext ctx) throws Exception {
- int parentId = ctx.pathList.getLastPageId();
- ICachedPage parentNode = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, parentId), false);
- incrementPins();
- parentNode.acquireWriteLatch();
- incrementWriteLatchesAcquired();
- ctx.interiorFrame.setPage(parentNode);
- boolean foundParent = true;
- int tupleIndex = -1;
+ public void updateParentForDelete(RTreeOpContext ctx) throws Exception {
+ int parentId = ctx.pathList.getLastPageId();
+ ICachedPage parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+ incrementPins();
+ parentNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.interiorFrame.setPage(parentNode);
+ boolean foundParent = true;
+ int tupleIndex = -1;
- if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
- foundParent = false;
- while (true) {
- tupleIndex = ctx.interiorFrame.findTupleByPointer(
- ctx.splitKey.getLeftTuple(), cmp);
- if (tupleIndex != -1) {
- // found the parent
- foundParent = true;
- break;
- }
- int rightPage = ctx.interiorFrame.getRightPage();
- parentNode.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(parentNode);
- incrementUnpins();
+ if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
+ foundParent = false;
+ while (true) {
+ tupleIndex = ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp);
+ if (tupleIndex != -1) {
+ // found the parent
+ foundParent = true;
+ break;
+ }
+ int rightPage = ctx.interiorFrame.getRightPage();
+ parentNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(parentNode);
+ incrementUnpins();
- if (rightPage == -1) {
- break;
- }
+ if (rightPage == -1) {
+ break;
+ }
- parentId = rightPage;
- parentNode = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, parentId),
- false);
- incrementPins();
- parentNode.acquireWriteLatch();
- incrementWriteLatchesAcquired();
- ctx.interiorFrame.setPage(parentNode);
- }
- }
- if (foundParent) {
- if (tupleIndex == -1) {
- tupleIndex = ctx.interiorFrame.findTupleByPointer(
- ctx.splitKey.getLeftTuple(), cmp);
- }
- boolean recomputeMBR = ctx.interiorFrame.recomputeMBR(
- ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
+ parentId = rightPage;
+ parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+ incrementPins();
+ parentNode.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.interiorFrame.setPage(parentNode);
+ }
+ }
+ if (foundParent) {
+ if (tupleIndex == -1) {
+ tupleIndex = ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), cmp);
+ }
+ boolean recomputeMBR = ctx.interiorFrame.recomputeMBR(ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
- if (recomputeMBR) {
- ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(),
- tupleIndex, cmp);
- ctx.pathList.moveLast();
+ if (recomputeMBR) {
+ ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
+ ctx.pathList.moveLast();
- incrementGlobalNsn();
- ctx.interiorFrame.setPageLsn(getGlobalNsn());
+ incrementGlobalNsn();
+ ctx.interiorFrame.setPageLsn(getGlobalNsn());
- ctx.splitKey.reset();
- if (!ctx.pathList.isEmpty()) {
- ctx.interiorFrame.computeMBR(ctx.splitKey, cmp);
- ctx.splitKey.setLeftPage(parentId);
- }
- } else {
- ctx.pathList.moveLast();
- ctx.splitKey.reset();
- }
+ ctx.splitKey.reset();
+ if (!ctx.pathList.isEmpty()) {
+ ctx.interiorFrame.computeMBR(ctx.splitKey, cmp);
+ ctx.splitKey.setLeftPage(parentId);
+ }
+ } else {
+ ctx.pathList.moveLast();
+ ctx.splitKey.reset();
+ }
- parentNode.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(parentNode);
- incrementUnpins();
- return;
- }
+ parentNode.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(parentNode);
+ incrementUnpins();
+ return;
+ }
- // very rare situation when the there is a root split, do an exhaustive
- // breadth-first traversal looking for the parent tuple
+ // very rare situation when there is a root split, do an exhaustive
+ // breadth-first traversal looking for the parent tuple
- ctx.pathList.clear();
- ctx.traverseList.clear();
- findPath(ctx);
- updateParentForDelete(ctx);
- }
+ ctx.pathList.clear();
+ ctx.traverseList.clear();
+ findPath(ctx);
+ updateParentForDelete(ctx);
+ }
- public int findTupleToDelete(RTreeOpContext ctx) throws Exception {
+ public int findTupleToDelete(RTreeOpContext ctx) throws Exception {
- ctx.traverseList.add(rootPage, -1, -1);
- ctx.pathList.add(rootPage, -1, ctx.traverseList.size() - 1);
+ ctx.traverseList.add(rootPage, -1, -1);
+ ctx.pathList.add(rootPage, -1, ctx.traverseList.size() - 1);
- while (!ctx.pathList.isEmpty()) {
- int pageId = ctx.pathList.getLastPageId();
- int parentLsn = ctx.pathList.getLastPageLsn();
- int pageIndex = ctx.pathList.getLastPageIndex();
- ctx.pathList.moveLast();
- ICachedPage node = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, pageId), false);
- incrementPins();
- node.acquireReadLatch();
- incrementReadLatchesAcquired();
- ctx.interiorFrame.setPage(node);
- boolean isLeaf = ctx.interiorFrame.isLeaf();
- int pageLsn = ctx.interiorFrame.getPageLsn();
- int parentIndex = ctx.traverseList.getPageIndex(pageIndex);
- ctx.traverseList.setPageLsn(pageIndex, pageLsn);
+ while (!ctx.pathList.isEmpty()) {
+ int pageId = ctx.pathList.getLastPageId();
+ int parentLsn = ctx.pathList.getLastPageLsn();
+ int pageIndex = ctx.pathList.getLastPageIndex();
+ ctx.pathList.moveLast();
+ ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ node.acquireReadLatch();
+ incrementReadLatchesAcquired();
+ ctx.interiorFrame.setPage(node);
+ boolean isLeaf = ctx.interiorFrame.isLeaf();
+ int pageLsn = ctx.interiorFrame.getPageLsn();
+ int parentIndex = ctx.traverseList.getPageIndex(pageIndex);
+ ctx.traverseList.setPageLsn(pageIndex, pageLsn);
- if (pageId != rootPage
- && parentLsn < ctx.interiorFrame.getPageNsn()) {
- // Concurrent split detected, we need to visit the right page
- int rightPage = ctx.interiorFrame.getRightPage();
- if (rightPage != -1) {
- ctx.traverseList.add(rightPage, -1, parentIndex);
- ctx.pathList.add(rightPage, parentLsn,
- ctx.traverseList.size() - 1);
- }
- }
+ if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
+ // Concurrent split detected, we need to visit the right page
+ int rightPage = ctx.interiorFrame.getRightPage();
+ if (rightPage != -1) {
+ ctx.traverseList.add(rightPage, -1, parentIndex);
+ ctx.pathList.add(rightPage, parentLsn, ctx.traverseList.size() - 1);
+ }
+ }
- if (!isLeaf) {
- for (int i = 0; i < ctx.interiorFrame.getTupleCount(); i++) {
- int childPageId = ctx.interiorFrame
- .getChildPageIdIfIntersect(ctx.tuple, i, cmp);
- if (childPageId != -1) {
- ctx.traverseList.add(childPageId, -1, pageIndex);
- ctx.pathList.add(childPageId, pageLsn,
- ctx.traverseList.size() - 1);
- }
- }
- } else {
- ctx.leafFrame.setPage(node);
- int tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, cmp);
- if (tupleIndex != -1) {
+ if (!isLeaf) {
+ for (int i = 0; i < ctx.interiorFrame.getTupleCount(); i++) {
+ int childPageId = ctx.interiorFrame.getChildPageIdIfIntersect(ctx.tuple, i, cmp);
+ if (childPageId != -1) {
+ ctx.traverseList.add(childPageId, -1, pageIndex);
+ ctx.pathList.add(childPageId, pageLsn, ctx.traverseList.size() - 1);
+ }
+ }
+ } else {
+ ctx.leafFrame.setPage(node);
+ int tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, cmp);
+ if (tupleIndex != -1) {
- node.releaseReadLatch();
- incrementReadLatchesReleased();
- bufferCache.unpin(node);
- incrementUnpins();
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
- node = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, pageId),
- false);
- incrementPins();
- node.acquireWriteLatch();
- incrementWriteLatchesAcquired();
- ctx.leafFrame.setPage(node);
+ node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ incrementPins();
+ node.acquireWriteLatch();
+ incrementWriteLatchesAcquired();
+ ctx.leafFrame.setPage(node);
- if (ctx.leafFrame.getPageLsn() != pageLsn) {
- // The page was changed while we unlocked it
+ if (ctx.leafFrame.getPageLsn() != pageLsn) {
+ // The page was changed while we unlocked it
- tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple,
- cmp);
- if (tupleIndex == -1) {
- ctx.traverseList.add(pageId, -1, parentIndex);
- ctx.pathList.add(pageId, parentLsn,
- ctx.traverseList.size() - 1);
+ tupleIndex = ctx.leafFrame.findTupleIndex(ctx.tuple, cmp);
+ if (tupleIndex == -1) {
+ ctx.traverseList.add(pageId, -1, parentIndex);
+ ctx.pathList.add(pageId, parentLsn, ctx.traverseList.size() - 1);
- node.releaseWriteLatch();
- incrementWriteLatchesReleased();
- bufferCache.unpin(node);
- incrementUnpins();
- continue;
- } else {
- ctx.pathList.clear();
- fillPath(ctx, pageIndex);
- return tupleIndex;
- }
- } else {
- ctx.pathList.clear();
- fillPath(ctx, pageIndex);
- return tupleIndex;
- }
- }
- }
- node.releaseReadLatch();
- incrementReadLatchesReleased();
- bufferCache.unpin(node);
- incrementUnpins();
- }
- return -1;
- }
+ node.releaseWriteLatch();
+ incrementWriteLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ continue;
+ } else {
+ ctx.pathList.clear();
+ fillPath(ctx, pageIndex);
+ return tupleIndex;
+ }
+ } else {
+ ctx.pathList.clear();
+ fillPath(ctx, pageIndex);
+ return tupleIndex;
+ }
+ }
+ }
+ node.releaseReadLatch();
+ incrementReadLatchesReleased();
+ bufferCache.unpin(node);
+ incrementUnpins();
+ }
+ return -1;
+ }
- public void deleteTuple(int pageId, int tupleIndex, RTreeOpContext ctx)
- throws Exception {
- ctx.leafFrame.delete(tupleIndex, cmp);
- incrementGlobalNsn();
- ctx.leafFrame.setPageLsn(getGlobalNsn());
+ public void deleteTuple(int pageId, int tupleIndex, RTreeOpContext ctx) throws Exception {
+ ctx.leafFrame.delete(tupleIndex, cmp);
+ incrementGlobalNsn();
+ ctx.leafFrame.setPageLsn(getGlobalNsn());
- // if the page is empty, just leave it there for future inserts
- if (pageId != rootPage && ctx.leafFrame.getTupleCount() > 0) {
- ctx.leafFrame.computeMBR(ctx.splitKey, cmp);
- ctx.splitKey.setLeftPage(pageId);
- }
- }
+ // if the page is now empty, just leave it there for future inserts; otherwise recompute its MBR into the split key
+ if (pageId != rootPage && ctx.leafFrame.getTupleCount() > 0) {
+ ctx.leafFrame.computeMBR(ctx.splitKey, cmp);
+ ctx.splitKey.setLeftPage(pageId);
+ }
+ }
- public void search(ITreeIndexCursor cursor, SearchPredicate pred,
- RTreeOpContext ctx) throws Exception {
- ctx.reset();
- ctx.cursor = cursor;
+ public void search(ITreeIndexCursor cursor, SearchPredicate pred, RTreeOpContext ctx) throws Exception {
+ ctx.reset();
+ ctx.cursor = cursor;
- cursor.setBufferCache(bufferCache);
- cursor.setFileId(fileId);
- ctx.cursorInitialState.setRootPage(rootPage);
- ctx.cursor.open(ctx.cursorInitialState, pred);
- }
+ cursor.setBufferCache(bufferCache);
+ cursor.setFileId(fileId);
+ ctx.cursorInitialState.setRootPage(rootPage);
+ ctx.cursor.open(ctx.cursorInitialState, pred);
+ }
- public ITreeIndexFrameFactory getInteriorFrameFactory() {
- return interiorFrameFactory;
- }
+ public ITreeIndexFrameFactory getInteriorFrameFactory() {
+ return interiorFrameFactory;
+ }
- public ITreeIndexFrameFactory getLeafFrameFactory() {
- return leafFrameFactory;
- }
+ public ITreeIndexFrameFactory getLeafFrameFactory() {
+ return leafFrameFactory;
+ }
- public MultiComparator getCmp() {
- return cmp;
- }
+ public MultiComparator getCmp() {
+ return cmp;
+ }
- public IFreePageManager getFreePageManager() {
- return freePageManager;
- }
+ public IFreePageManager getFreePageManager() {
+ return freePageManager;
+ }
- @Override
- public void update(ITupleReference tuple, IndexOpContext ictx)
- throws Exception {
- throw new Exception("RTree Update not implemented.");
- }
+ @Override
+ public void update(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ throw new Exception("RTree Update not implemented.");
+ }
- public final class BulkLoadContext implements IIndexBulkLoadContext {
+ public final class BulkLoadContext implements IIndexBulkLoadContext {
- public RTreeOpContext insertOpCtx;
+ public RTreeOpContext insertOpCtx;
- public BulkLoadContext(float fillFactor, IRTreeFrame leafFrame,
- IRTreeFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame)
- throws HyracksDataException {
+ public BulkLoadContext(float fillFactor, IRTreeFrame leafFrame, IRTreeFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
- insertOpCtx = createOpContext(IndexOp.INSERT, leafFrame,
- interiorFrame, metaFrame);
- }
- }
+ insertOpCtx = createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+ }
+ }
- @Override
- public IIndexBulkLoadContext beginBulkLoad(float fillFactor,
- ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
- ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
- if (loaded)
- throw new HyracksDataException(
- "Trying to bulk-load RTree but RTree has already been loaded.");
+ @Override
+ public IIndexBulkLoadContext beginBulkLoad(float fillFactor, ITreeIndexFrame leafFrame,
+ ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+ if (loaded)
+ throw new HyracksDataException("Trying to bulk-load RTree but RTree has already been loaded.");
- BulkLoadContext ctx = new BulkLoadContext(fillFactor,
- (IRTreeFrame) leafFrame, (IRTreeFrame) interiorFrame, metaFrame);
- return ctx;
- }
+ BulkLoadContext ctx = new BulkLoadContext(fillFactor, (IRTreeFrame) leafFrame, (IRTreeFrame) interiorFrame,
+ metaFrame);
+ return ctx;
+ }
- @Override
- public void bulkLoadAddTuple(IIndexBulkLoadContext ictx,
- ITupleReference tuple) throws HyracksDataException {
- try {
- insert(tuple, ((BulkLoadContext) ictx).insertOpCtx);
- } catch (Exception e) {
- throw new HyracksDataException("BulkLoad Error");
- }
- }
+ @Override
+ public void bulkLoadAddTuple(IIndexBulkLoadContext ictx, ITupleReference tuple) throws HyracksDataException {
+ try {
+ insert(tuple, ((BulkLoadContext) ictx).insertOpCtx);
+ } catch (Exception e) {
+ throw new HyracksDataException("BulkLoad Error");
+ }
+ }
- @Override
- public void endBulkLoad(IIndexBulkLoadContext ictx)
- throws HyracksDataException {
- loaded = true;
- }
+ @Override
+ public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
+ loaded = true;
+ }
- @Override
- public void diskOrderScan(ITreeIndexCursor icursor,
- ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
- IndexOpContext ictx) throws HyracksDataException {
- TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
- RTreeOpContext ctx = (RTreeOpContext) ictx;
- ctx.reset();
+ @Override
+ public void diskOrderScan(ITreeIndexCursor icursor, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
+ IndexOpContext ictx) throws HyracksDataException {
+ TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
+ RTreeOpContext ctx = (RTreeOpContext) ictx;
+ ctx.reset();
- int currentPageId = rootPage + 1;
- int maxPageId = freePageManager.getMaxPage(metaFrame);
+ int currentPageId = rootPage + 1;
+ int maxPageId = freePageManager.getMaxPage(metaFrame);
- ICachedPage page = bufferCache.pin(
- BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
- page.acquireReadLatch();
- cursor.setBufferCache(bufferCache);
- cursor.setFileId(fileId);
- cursor.setCurrentPageId(currentPageId);
- cursor.setMaxPageId(maxPageId);
- ctx.cursorInitialState.setPage(page);
- cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
- }
+ ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
+ page.acquireReadLatch();
+ cursor.setBufferCache(bufferCache);
+ cursor.setFileId(fileId);
+ cursor.setCurrentPageId(currentPageId);
+ cursor.setMaxPageId(maxPageId);
+ ctx.cursorInitialState.setPage(page);
+ cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
+ }
- @Override
- public int getRootPageId() {
- return rootPage;
- }
+ @Override
+ public int getRootPageId() {
+ return rootPage;
+ }
- @Override
- public int getFieldCount() {
- return cmp.getFieldCount();
- }
+ @Override
+ public int getFieldCount() {
+ return cmp.getFieldCount();
+ }
- @Override
- public IndexType getIndexType() {
- return IndexType.RTREE;
- }
-}
\ No newline at end of file
+ @Override
+ public IndexType getIndexType() {
+ return IndexType.RTREE;
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
index 6acac7e..244c69e 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
@@ -23,36 +23,35 @@
public class RTreeTypeAwareTupleWriter extends TypeAwareTupleWriter {
- public RTreeTypeAwareTupleWriter(ITypeTrait[] typeTraits) {
- super(typeTraits);
- }
+ public RTreeTypeAwareTupleWriter(ITypeTrait[] typeTraits) {
+ super(typeTraits);
+ }
- public int writeTupleFields(ITreeIndexTupleReference[] refs,
- int startField, ByteBuffer targetBuf, int targetOff) {
- int runner = targetOff;
- int nullFlagsBytes = getNullFlagsBytes(refs.length);
- // write null indicator bits
- for (int i = 0; i < nullFlagsBytes; i++) {
- targetBuf.put(runner++, (byte) 0);
- }
+ public int writeTupleFields(ITreeIndexTupleReference[] refs, int startField, ByteBuffer targetBuf, int targetOff) {
+ int runner = targetOff;
+ int nullFlagsBytes = getNullFlagsBytes(refs.length);
+ // write null indicator bits
+ for (int i = 0; i < nullFlagsBytes; i++) {
+ targetBuf.put(runner++, (byte) 0);
+ }
- // write field slots for variable length fields
- // since the r-tree has fixed length keys, we don't actually need this?
- encDec.reset(targetBuf.array(), runner);
- for (int i = startField; i < startField + refs.length; i++) {
- if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
- encDec.encode(refs[i].getFieldLength(i));
- }
- }
- runner = encDec.getPos();
+ // write field slots for variable length fields
+ // since the r-tree has fixed-length keys, these slots may not actually be needed
+ encDec.reset(targetBuf.array(), runner);
+ for (int i = startField; i < startField + refs.length; i++) {
+ if (typeTraits[i].getStaticallyKnownDataLength() == ITypeTrait.VARIABLE_LENGTH) {
+ encDec.encode(refs[i].getFieldLength(i));
+ }
+ }
+ runner = encDec.getPos();
- // write data
- for (int i = 0; i < refs.length; i++) {
- System.arraycopy(refs[i].getFieldData(i), refs[i].getFieldStart(i),
- targetBuf.array(), runner, refs[i].getFieldLength(i));
- runner += refs[i].getFieldLength(i);
- }
- return runner - targetOff;
+ // write data
+ for (int i = 0; i < refs.length; i++) {
+ System.arraycopy(refs[i].getFieldData(i), refs[i].getFieldStart(i), targetBuf.array(), runner,
+ refs[i].getFieldLength(i));
+ runner += refs[i].getFieldLength(i);
+ }
+ return runner - targetOff;
- }
+ }
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
index 7d88f2e..a27d8cd 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
@@ -19,19 +19,17 @@
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-public class RTreeTypeAwareTupleWriterFactory implements
- ITreeIndexTupleWriterFactory {
+public class RTreeTypeAwareTupleWriterFactory implements ITreeIndexTupleWriterFactory {
- private static final long serialVersionUID = 1L;
- private ITypeTrait[] typeTraits;
+ private static final long serialVersionUID = 1L;
+ private ITypeTrait[] typeTraits;
- public RTreeTypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
- this.typeTraits = typeTraits;
- }
+ public RTreeTypeAwareTupleWriterFactory(ITypeTrait[] typeTraits) {
+ this.typeTraits = typeTraits;
+ }
- @Override
- public ITreeIndexTupleWriter createTupleWriter() {
- return new RTreeTypeAwareTupleWriter(typeTraits);
- }
-
+ @Override
+ public ITreeIndexTupleWriter createTupleWriter() {
+ return new RTreeTypeAwareTupleWriter(typeTraits);
+ }
}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
index 4402e22..ab3d9d9 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
@@ -55,186 +55,171 @@
public class BTreeFieldPrefixNSMTest extends AbstractBTreeTest {
- private static final int PAGE_SIZE = 32768; // 32K
- private static final int NUM_PAGES = 40;
- private static final int MAX_OPEN_FILES = 10;
- private static final int HYRACKS_FRAME_SIZE = 128;
- private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-
- private ITupleReference createTuple(IHyracksTaskContext ctx, int f0,
- int f1, int f2, boolean print) throws HyracksDataException {
- if (print)
- LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
+ private static final int PAGE_SIZE = 32768; // 32K
+ private static final int NUM_PAGES = 40;
+ private static final int MAX_OPEN_FILES = 10;
+ private static final int HYRACKS_FRAME_SIZE = 128;
+ private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
- ByteBuffer buf = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
- DataOutput dos = tb.getDataOutput();
+ private ITupleReference createTuple(IHyracksTaskContext ctx, int f0, int f1, int f2, boolean print)
+ throws HyracksDataException {
+ if (print)
+ LOGGER.info("CREATING: " + f0 + " " + f1 + " " + f2);
- ISerializerDeserializer[] recDescSers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(buf);
- FrameTupleReference tuple = new FrameTupleReference();
+ ByteBuffer buf = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
+ DataOutput dos = tb.getDataOutput();
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
- tb.addFieldEndOffset();
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(buf);
+ FrameTupleReference tuple = new FrameTupleReference();
- appender.reset(buf, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
- .getSize());
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+ tb.addFieldEndOffset();
- tuple.reset(accessor, 0);
+ appender.reset(buf, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- return tuple;
- }
+ tuple.reset(accessor, 0);
- @Test
- public void test01() throws Exception {
-
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ return tuple;
+ }
- // declare fields
- int fieldCount = 3;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
- typeTraits[2] = new TypeTrait(4);
+ @Test
+ public void test01() throws Exception {
- // declare keys
- int keyFieldCount = 3;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- cmps[2] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- // just for printing
- ISerializerDeserializer[] sers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
+ // declare fields
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
+ typeTraits[2] = new TypeTrait(4);
- Random rnd = new Random();
- rnd.setSeed(50);
+ // declare keys
+ int keyFieldCount = 3;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[2] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(
- fileId, 0), false);
- try {
+ // just for printing
+ ISerializerDeserializer[] sers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
- IPrefixSlotManager slotManager = new FieldPrefixSlotManager();
- ITreeIndexTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
- BTreeFieldPrefixNSMLeafFrame frame = new BTreeFieldPrefixNSMLeafFrame(
- tupleWriter);
- frame.setPage(page);
- frame.initBuffer((byte) 0);
- slotManager.setFrame(frame);
- frame.setPrefixTupleCount(0);
+ Random rnd = new Random();
+ rnd.setSeed(50);
- String before = new String();
- String after = new String();
+ ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, 0), false);
+ try {
- int compactFreq = 5;
- int compressFreq = 5;
- int smallMax = 10;
- int numRecords = 1000;
+ IPrefixSlotManager slotManager = new FieldPrefixSlotManager();
+ ITreeIndexTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
+ BTreeFieldPrefixNSMLeafFrame frame = new BTreeFieldPrefixNSMLeafFrame(tupleWriter);
+ frame.setPage(page);
+ frame.initBuffer((byte) 0);
+ slotManager.setFrame(frame);
+ frame.setPrefixTupleCount(0);
- int[][] savedFields = new int[numRecords][3];
+ String before = new String();
+ String after = new String();
- // insert records with random calls to compact and compress
- for (int i = 0; i < numRecords; i++) {
+ int compactFreq = 5;
+ int compressFreq = 5;
+ int smallMax = 10;
+ int numRecords = 1000;
- if ((i + 1) % 100 == 0)
- LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
+ int[][] savedFields = new int[numRecords][3];
- int a = rnd.nextInt() % smallMax;
- int b = rnd.nextInt() % smallMax;
- int c = i;
+ // insert records with random calls to compact and compress
+ for (int i = 0; i < numRecords; i++) {
- ITupleReference tuple = createTuple(ctx, a, b, c, false);
- try {
- int targetTupleIndex = frame.findTupleIndex(tuple, cmp);
- frame.insert(tuple, cmp, targetTupleIndex);
- } catch (BTreeException e) {
- e.printStackTrace();
- } catch (Exception e) {
- e.printStackTrace();
- }
+ if ((i + 1) % 100 == 0)
+ LOGGER.info("INSERTING " + (i + 1) + " / " + numRecords);
- savedFields[i][0] = a;
- savedFields[i][1] = b;
- savedFields[i][2] = c;
+ int a = rnd.nextInt() % smallMax;
+ int b = rnd.nextInt() % smallMax;
+ int c = i;
+
+ ITupleReference tuple = createTuple(ctx, a, b, c, false);
+ try {
+ int targetTupleIndex = frame.findTupleIndex(tuple, cmp, true);
+ frame.insert(tuple, cmp, targetTupleIndex);
+ } catch (BTreeException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
- if (rnd.nextInt() % compactFreq == 0) {
- before = frame.printKeys(cmp, sers);
- frame.compact(cmp);
- after = frame.printKeys(cmp, sers);
- Assert.assertEquals(before, after);
- }
+ savedFields[i][0] = a;
+ savedFields[i][1] = b;
+ savedFields[i][2] = c;
- if (rnd.nextInt() % compressFreq == 0) {
- before = frame.printKeys(cmp, sers);
- frame.compress(cmp);
- after = frame.printKeys(cmp, sers);
- Assert.assertEquals(before, after);
- }
+ if (rnd.nextInt() % compactFreq == 0) {
+ before = frame.printKeys(cmp, sers);
+ frame.compact(cmp);
+ after = frame.printKeys(cmp, sers);
+ Assert.assertEquals(before, after);
+ }
- }
+ if (rnd.nextInt() % compressFreq == 0) {
+ before = frame.printKeys(cmp, sers);
+ frame.compress(cmp);
+ after = frame.printKeys(cmp, sers);
+ Assert.assertEquals(before, after);
+ }
- // delete records with random calls to compact and compress
- for (int i = 0; i < numRecords; i++) {
+ }
- if ((i + 1) % 100 == 0)
- LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
+ // delete records with random calls to compact and compress
+ for (int i = 0; i < numRecords; i++) {
- ITupleReference tuple = createTuple(ctx,
- savedFields[i][0], savedFields[i][1],
- savedFields[i][2], false);
- try {
- frame.delete(tuple, cmp, true);
- } catch (Exception e) {
- }
+ if ((i + 1) % 100 == 0)
+ LOGGER.info("DELETING " + (i + 1) + " / " + numRecords);
- if (rnd.nextInt() % compactFreq == 0) {
- before = frame.printKeys(cmp, sers);
- frame.compact(cmp);
- after = frame.printKeys(cmp, sers);
- Assert.assertEquals(before, after);
- }
+ ITupleReference tuple = createTuple(ctx, savedFields[i][0], savedFields[i][1], savedFields[i][2], false);
+ try {
+ frame.delete(tuple, cmp, true);
+ } catch (Exception e) {
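+ // Ignore failed deletes; this test only checks that compact/compress preserve the keys.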
+ }
- if (rnd.nextInt() % compressFreq == 0) {
- before = frame.printKeys(cmp, sers);
- frame.compress(cmp);
- after = frame.printKeys(cmp, sers);
- Assert.assertEquals(before, after);
- }
- }
+ if (rnd.nextInt() % compactFreq == 0) {
+ before = frame.printKeys(cmp, sers);
+ frame.compact(cmp);
+ after = frame.printKeys(cmp, sers);
+ Assert.assertEquals(before, after);
+ }
- } finally {
- bufferCache.unpin(page);
- }
+ if (rnd.nextInt() % compressFreq == 0) {
+ before = frame.printKeys(cmp, sers);
+ frame.compress(cmp);
+ after = frame.printKeys(cmp, sers);
+ Assert.assertEquals(before, after);
+ }
+ }
- bufferCache.closeFile(fileId);
- bufferCache.close();
- }
+ } finally {
+ bufferCache.unpin(page);
+ }
+
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ }
}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
index a9debd5..4c7e429 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
@@ -15,9 +15,13 @@
package edu.uci.ics.hyracks.storage.am.btree;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.File;
import java.nio.ByteBuffer;
+import java.util.HashMap;
+import java.util.Map;
import java.util.Random;
import org.junit.Test;
@@ -29,6 +33,7 @@
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.api.io.FileReference;
import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
@@ -107,7 +112,7 @@
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
@@ -313,9 +318,8 @@
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ //ITreeIndexFrameFactory leafFrameFactory = new BTreeFieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
@@ -499,9 +503,7 @@
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
- // TypeAwareTupleWriterFactory tupleWriterFactory = new
- // TypeAwareTupleWriterFactory(typeTraits);
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
@@ -674,8 +676,6 @@
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
@@ -804,12 +804,216 @@
bufferCache.close();
}
+
+ private void orderedScan(BTree btree, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] recDescSers) throws Exception {
+ // try a simple index scan
+ LOGGER.info("ORDERED SCAN:");
+ ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(scanCursor, nullPred, searchOpCtx);
+ StringBuilder scanResults = new StringBuilder();
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = btree.getMultiComparator().printTuple(frameTuple, recDescSers);
+ scanResults.append("\n" + rec);
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
+ LOGGER.info(scanResults.toString());
+ }
+
+ // Assuming exactly two BTree fields.
+ private void compareActualAndExpected(ITreeIndexCursor actualCursor, Map<String, String> expectedValues, ISerializerDeserializer[] fieldSerdes) throws Exception {
+ while (actualCursor.hasNext()) {
+ actualCursor.next();
+ ITupleReference tuple = actualCursor.getTuple();
+ String f0 = (String) deserializeField(tuple, 0, fieldSerdes[0]);
+ String f1 = (String) deserializeField(tuple, 1, fieldSerdes[1]);
+ String expected = expectedValues.get(f0);
+ if (!expected.equals(f1)) {
+ throw new Exception("FAILED: " + f0 + " " + f1 + " " + expected);
+ }
+ }
+ }
+
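+ // Deserializes a single field of the tuple back into a Java object using the given serde.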
+ private Object deserializeField(ITupleReference tuple, int fIdx, ISerializerDeserializer serde) throws HyracksDataException {
+ DataInputStream in = new DataInputStream(new ByteArrayInputStream(tuple.getFieldData(fIdx), tuple.getFieldStart(fIdx), tuple.getFieldLength(fIdx)));
+ return serde.deserialize(in);
+ }
+
+ // UPDATE TEST
+ // create a B-tree with one variable-length "key" field and one
+ // variable-length "value" field
+ // fill B-tree with random values using insertions, then update entries
+ // one-by-one
+ // repeat procedure a few times on same B-tree
+ @Test
+ public void test05() throws Exception {
+
+ LOGGER.info("DELETION TEST");
+
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // declare fields
+ int fieldCount = 2;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ //TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] fieldSerdes = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(fieldSerdes);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+ BTreeOpContext updateOpCtx = btree.createOpContext(IndexOp.UPDATE, leafFrame, interiorFrame, metaFrame);
+
+ Map<String, String> expectedValues = new HashMap<String, String>();
+
+ LOGGER.info("INSERTING INTO BTREE");
+ int maxLength = 10;
+ int ins = 10000;
+ // Only remember the keys.
+ String[] f0s = new String[ins];
+ for (int i = 0; i < ins; i++) {
+ String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+
+ f0s[i] = f0;
+
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ LOGGER.info("INSERTING " + i);
+ }
+ try {
+ btree.insert(tuple, insertOpCtx);
+ expectedValues.put(f0, f1);
+ } catch (TreeIndexException e) {
+ // e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
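+ // Verify the inserted key/value pairs with a full range scan.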
+ ITreeIndexCursor insertCheckCursor = new BTreeRangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(insertCheckCursor, nullPred, searchOpCtx);
+ try {
+ compareActualAndExpected(insertCheckCursor, expectedValues, fieldSerdes);
+ } finally {
+ insertCheckCursor.close();
+ }
+
+ int runs = 3;
+ for (int run = 0; run < runs; run++) {
+
+ LOGGER.info("UPDATE TEST RUN: " + (run + 1) + "/" + runs);
+
+ LOGGER.info("UPDATING BTREE");
+ for (int i = 0; i < ins; i++) {
+ // Generate a new random value for f1.
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f0s[i], dos);
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ LOGGER.info("UPDATING " + i);
+ }
+
+ try {
+ btree.update(tuple, updateOpCtx);
+ expectedValues.put(f0s[i], f1);
+ } catch (TreeIndexException e) {
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
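+ // Re-scan after each update pass and verify that the new values are visible.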
+ ITreeIndexCursor updateCheckCursor = new BTreeRangeSearchCursor(leafFrame);
+ btree.search(updateCheckCursor, nullPred, searchOpCtx);
+ try {
+ compareActualAndExpected(updateCheckCursor, expectedValues, fieldSerdes);
+ } finally {
+ updateCheckCursor.close();
+ }
+ }
+
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+ }
+
// BULK LOAD TEST
// insert 100,000 records in bulk
// B-tree has a composite key to "simulate" non-unique index creation
// do range search
@Test
- public void test05() throws Exception {
+ public void test06() throws Exception {
LOGGER.info("BULK LOAD TEST");
@@ -836,8 +1040,6 @@
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
@@ -965,7 +1167,7 @@
// demo for Arjun to show easy support of intersection queries on
// time-intervals
@Test
- public void test06() throws Exception {
+ public void test07() throws Exception {
LOGGER.info("TIME-INTERVAL INTERSECTION DEMO");
@@ -991,8 +1193,6 @@
cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);