Code changes based on Alex's code review comments
git-svn-id: https://hyracks.googlecode.com/svn/branches/hyracks_indexes@517 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelperFactory.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelperFactory.java
index f784624..7802b81 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelperFactory.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/dataflow/BTreeOpHelperFactory.java
@@ -3,25 +3,22 @@
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeDiskOrderScanCursor;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptorHelper;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
public class BTreeOpHelperFactory implements ITreeIndexOpHelperFactory {
-
+
private static final long serialVersionUID = 1L;
@Override
public TreeIndexOpHelper createTreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
- IHyracksStageletContext ctx, int partition, IndexHelperOpenMode mode) {
+ IHyracksStageletContext ctx, int partition, IndexHelperOpenMode mode) {
return new BTreeOpHelper(opDesc, ctx, partition, mode);
}
-
- public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame) throws HyracksDataException {
- return new BTreeDiskOrderScanCursor((IBTreeLeafFrame)leafFrame);
- }
+
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
index 06c8581..975d956 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
@@ -36,6 +36,7 @@
import edu.uci.ics.hyracks.storage.am.common.api.IndexType;
import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
@@ -45,1318 +46,1232 @@
public class BTree implements ITreeIndex {
- public static final float DEFAULT_FILL_FACTOR = 0.7f;
+ public static final float DEFAULT_FILL_FACTOR = 0.7f;
- private final static int RESTART_OP = Integer.MIN_VALUE;
- private final static int MAX_RESTARTS = 10;
+ private final static int RESTART_OP = Integer.MIN_VALUE;
+ private final static int MAX_RESTARTS = 10;
- // the root page never changes
- private final int rootPage = 1;
+ // the root page never changes
+ private final int rootPage = 1;
- private final IFreePageManager freePageManager;
+ private final IFreePageManager freePageManager;
- private boolean created = false;
- private boolean loaded = false;
+ private boolean created = false;
+ private boolean loaded = false;
- private final IBufferCache bufferCache;
- private int fileId;
- private final ITreeIndexFrameFactory interiorFrameFactory;
- private final ITreeIndexFrameFactory leafFrameFactory;
- private final MultiComparator cmp;
- private final ReadWriteLock treeLatch;
- private final RangePredicate diskOrderScanPredicate;
+ private final IBufferCache bufferCache;
+ private int fileId;
+ private final ITreeIndexFrameFactory interiorFrameFactory;
+ private final ITreeIndexFrameFactory leafFrameFactory;
+ private final MultiComparator cmp;
+ private final ReadWriteLock treeLatch;
+ private final RangePredicate diskOrderScanPredicate;
- public int rootSplits = 0;
- public int[] splitsByLevel = new int[500];
- public long readLatchesAcquired = 0;
- public long readLatchesReleased = 0;
- public long writeLatchesAcquired = 0;
- public long writeLatchesReleased = 0;
- public long pins = 0;
- public long unpins = 0;
+ public int rootSplits = 0;
+ public int[] splitsByLevel = new int[500];
+ public long readLatchesAcquired = 0;
+ public long readLatchesReleased = 0;
+ public long writeLatchesAcquired = 0;
+ public long writeLatchesReleased = 0;
+ public long pins = 0;
+ public long unpins = 0;
- public long treeLatchesAcquired = 0;
- public long treeLatchesReleased = 0;
+ public long treeLatchesAcquired = 0;
+ public long treeLatchesReleased = 0;
- public byte currentLevel = 0;
+ public byte currentLevel = 0;
- public int usefulCompression = 0;
- public int uselessCompression = 0;
+ public int usefulCompression = 0;
+ public int uselessCompression = 0;
- public void treeLatchStatus() {
- System.out.println(treeLatch.writeLock().toString());
- }
+ public void treeLatchStatus() {
+ System.out.println(treeLatch.writeLock().toString());
+ }
- public String printStats() {
- StringBuilder strBuilder = new StringBuilder();
- strBuilder.append("\n");
- strBuilder.append("ROOTSPLITS: " + rootSplits + "\n");
- strBuilder.append("SPLITS BY LEVEL\n");
- for (int i = 0; i < currentLevel; i++) {
- strBuilder.append(String.format("%3d ", i)
- + String.format("%8d ", splitsByLevel[i]) + "\n");
- }
- strBuilder.append(String.format("READ LATCHES: %10d %10d\n",
- readLatchesAcquired, readLatchesReleased));
- strBuilder.append(String.format("WRITE LATCHES: %10d %10d\n",
- writeLatchesAcquired, writeLatchesReleased));
- strBuilder.append(String.format("PINS: %10d %10d\n", pins,
- unpins));
- return strBuilder.toString();
- }
+ public String printStats() {
+ StringBuilder strBuilder = new StringBuilder();
+ strBuilder.append("\n");
+ strBuilder.append("ROOTSPLITS: " + rootSplits + "\n");
+ strBuilder.append("SPLITS BY LEVEL\n");
+ for (int i = 0; i < currentLevel; i++) {
+ strBuilder.append(String.format("%3d ", i) + String.format("%8d ", splitsByLevel[i]) + "\n");
+ }
+ strBuilder.append(String.format("READ LATCHES: %10d %10d\n", readLatchesAcquired, readLatchesReleased));
+ strBuilder.append(String.format("WRITE LATCHES: %10d %10d\n", writeLatchesAcquired, writeLatchesReleased));
+ strBuilder.append(String.format("PINS: %10d %10d\n", pins, unpins));
+ return strBuilder.toString();
+ }
- public BTree(IBufferCache bufferCache, IFreePageManager freePageManager,
- ITreeIndexFrameFactory interiorFrameFactory,
- ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp) {
- this.bufferCache = bufferCache;
- this.interiorFrameFactory = interiorFrameFactory;
- this.leafFrameFactory = leafFrameFactory;
- this.cmp = cmp;
- this.freePageManager = freePageManager;
- this.treeLatch = new ReentrantReadWriteLock(true);
- this.diskOrderScanPredicate = new RangePredicate(true, null, null,
- true, true, cmp, cmp);
- }
+ public BTree(IBufferCache bufferCache, IFreePageManager freePageManager,
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp) {
+ this.bufferCache = bufferCache;
+ this.interiorFrameFactory = interiorFrameFactory;
+ this.leafFrameFactory = leafFrameFactory;
+ this.cmp = cmp;
+ this.freePageManager = freePageManager;
+ this.treeLatch = new ReentrantReadWriteLock(true);
+ this.diskOrderScanPredicate = new RangePredicate(true, null, null, true, true, cmp, cmp);
+ }
- @Override
- public void create(int fileId, ITreeIndexFrame leafFrame,
- ITreeIndexMetaDataFrame metaFrame) throws Exception {
+ @Override
+ public void create(int fileId, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame) throws Exception {
- if (created)
- return;
+ if (created)
+ return;
- treeLatch.writeLock().lock();
- try {
+ treeLatch.writeLock().lock();
+ try {
- // check if another thread beat us to it
- if (created)
- return;
+ // check if another thread beat us to it
+ if (created)
+ return;
- freePageManager.init(metaFrame, rootPage);
+ freePageManager.init(metaFrame, rootPage);
- // initialize root page
- ICachedPage rootNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, rootPage), true);
- pins++;
+ // initialize root page
+ ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), true);
+ pins++;
- rootNode.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- leafFrame.setPage(rootNode);
- leafFrame.initBuffer((byte) 0);
- } finally {
- rootNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(rootNode);
- unpins++;
- }
- currentLevel = 0;
+ rootNode.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ leafFrame.setPage(rootNode);
+ leafFrame.initBuffer((byte) 0);
+ } finally {
+ rootNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(rootNode);
+ unpins++;
+ }
+ currentLevel = 0;
- created = true;
- } finally {
- treeLatch.writeLock().unlock();
- }
- }
+ created = true;
+ } finally {
+ treeLatch.writeLock().unlock();
+ }
+ }
- public void open(int fileId) {
- this.fileId = fileId;
- }
+ public void open(int fileId) {
+ this.fileId = fileId;
+ }
- public void close() {
- fileId = -1;
- }
+ public void close() {
+ fileId = -1;
+ }
- private void addFreePages(BTreeOpContext ctx) throws Exception {
- for (int i = 0; i < ctx.freePages.size(); i++) {
- // root page is special, don't add it to free pages
- if (ctx.freePages.get(i) != rootPage) {
- freePageManager
- .addFreePage(ctx.metaFrame, ctx.freePages.get(i));
- }
- }
- ctx.freePages.clear();
- }
+ private void addFreePages(BTreeOpContext ctx) throws Exception {
+ for (int i = 0; i < ctx.freePages.size(); i++) {
+ // root page is special, don't add it to free pages
+ if (ctx.freePages.get(i) != rootPage) {
+ freePageManager.addFreePage(ctx.metaFrame, ctx.freePages.get(i));
+ }
+ }
+ ctx.freePages.clear();
+ }
- public void printTree(IBTreeLeafFrame leafFrame,
- IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] fields)
- throws Exception {
- printTree(rootPage, null, false, leafFrame, interiorFrame, fields);
- }
+ public void printTree(IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] fields)
+ throws Exception {
+ printTree(rootPage, null, false, leafFrame, interiorFrame, fields);
+ }
- public void printTree(int pageId, ICachedPage parent, boolean unpin,
- IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
- ISerializerDeserializer[] fields) throws Exception {
+ public void printTree(int pageId, ICachedPage parent, boolean unpin, IBTreeLeafFrame leafFrame,
+ IBTreeInteriorFrame interiorFrame, ISerializerDeserializer[] fields) throws Exception {
- ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(
- fileId, pageId), false);
- pins++;
- node.acquireReadLatch();
- readLatchesAcquired++;
+ ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ pins++;
+ node.acquireReadLatch();
+ readLatchesAcquired++;
- try {
- if (parent != null && unpin == true) {
- parent.releaseReadLatch();
- readLatchesReleased++;
+ try {
+ if (parent != null && unpin == true) {
+ parent.releaseReadLatch();
+ readLatchesReleased++;
- bufferCache.unpin(parent);
- unpins++;
- }
+ bufferCache.unpin(parent);
+ unpins++;
+ }
- interiorFrame.setPage(node);
- int level = interiorFrame.getLevel();
+ interiorFrame.setPage(node);
+ int level = interiorFrame.getLevel();
- System.out.format("%1d ", level);
- System.out.format("%3d ", pageId);
- for (int i = 0; i < currentLevel - level; i++)
- System.out.format(" ");
+ System.out.format("%1d ", level);
+ System.out.format("%3d ", pageId);
+ for (int i = 0; i < currentLevel - level; i++)
+ System.out.format(" ");
- String keyString;
- if (interiorFrame.isLeaf()) {
- leafFrame.setPage(node);
- keyString = leafFrame.printKeys(cmp, fields);
- } else {
- keyString = interiorFrame.printKeys(cmp, fields);
- }
+ String keyString;
+ if (interiorFrame.isLeaf()) {
+ leafFrame.setPage(node);
+ keyString = leafFrame.printKeys(cmp, fields);
+ } else {
+ keyString = interiorFrame.printKeys(cmp, fields);
+ }
- System.out.format(keyString);
- if (!interiorFrame.isLeaf()) {
- ArrayList<Integer> children = ((NSMInteriorFrame) (interiorFrame))
- .getChildren(cmp);
+ System.out.format(keyString);
+ if (!interiorFrame.isLeaf()) {
+ ArrayList<Integer> children = ((NSMInteriorFrame) (interiorFrame)).getChildren(cmp);
- for (int i = 0; i < children.size(); i++) {
- printTree(children.get(i), node, i == children.size() - 1,
- leafFrame, interiorFrame, fields);
- }
- } else {
- node.releaseReadLatch();
- readLatchesReleased++;
+ for (int i = 0; i < children.size(); i++) {
+ printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, fields);
+ }
+ } else {
+ node.releaseReadLatch();
+ readLatchesReleased++;
- bufferCache.unpin(node);
- unpins++;
- }
- } catch (Exception e) {
- node.releaseReadLatch();
- readLatchesReleased++;
+ bufferCache.unpin(node);
+ unpins++;
+ }
+ } catch (Exception e) {
+ node.releaseReadLatch();
+ readLatchesReleased++;
- bufferCache.unpin(node);
- unpins++;
- e.printStackTrace();
- }
- }
+ bufferCache.unpin(node);
+ unpins++;
+ e.printStackTrace();
+ }
+ }
- @Override
- public void diskOrderScan(ITreeIndexCursor icursor,
- ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame, IndexOpContext ictx)
- throws HyracksDataException {
- BTreeDiskOrderScanCursor cursor = (BTreeDiskOrderScanCursor)icursor;
- BTreeOpContext ctx = (BTreeOpContext) ictx;
- ctx.reset();
-
- int currentPageId = rootPage + 1;
- int maxPageId = freePageManager.getMaxPage(metaFrame);
+ @Override
+ public void diskOrderScan(ITreeIndexCursor icursor, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
+ IndexOpContext ictx) throws HyracksDataException {
+ TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
+ BTreeOpContext ctx = (BTreeOpContext) ictx;
+ ctx.reset();
- ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(
- fileId, currentPageId), false);
- page.acquireReadLatch();
- cursor.setBufferCache(bufferCache);
- cursor.setFileId(fileId);
- cursor.setCurrentPageId(currentPageId);
- cursor.setMaxPageId(maxPageId);
- ctx.cursorInitialState.setPage(page);
- cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
- }
+ int currentPageId = rootPage + 1;
+ int maxPageId = freePageManager.getMaxPage(metaFrame);
- public void search(ITreeIndexCursor cursor, RangePredicate pred,
- BTreeOpContext ctx) throws Exception {
- ctx.reset();
- ctx.pred = pred;
- ctx.cursor = cursor;
- // simple index scan
- if (ctx.pred.getLowKeyComparator() == null)
- ctx.pred.setLowKeyComparator(cmp);
- if (ctx.pred.getHighKeyComparator() == null)
- ctx.pred.setHighKeyComparator(cmp);
+ ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
+ page.acquireReadLatch();
+ cursor.setBufferCache(bufferCache);
+ cursor.setFileId(fileId);
+ cursor.setCurrentPageId(currentPageId);
+ cursor.setMaxPageId(maxPageId);
+ ctx.cursorInitialState.setPage(page);
+ cursor.open(ctx.cursorInitialState, diskOrderScanPredicate);
+ }
- boolean repeatOp = true;
- // we use this loop to deal with possibly multiple operation restarts
- // due to ongoing structure modifications during the descent
- while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
- performOp(rootPage, null, ctx);
+ public void search(ITreeIndexCursor cursor, RangePredicate pred, BTreeOpContext ctx) throws Exception {
+ ctx.reset();
+ ctx.pred = pred;
+ ctx.cursor = cursor;
+ // simple index scan
+ if (ctx.pred.getLowKeyComparator() == null)
+ ctx.pred.setLowKeyComparator(cmp);
+ if (ctx.pred.getHighKeyComparator() == null)
+ ctx.pred.setHighKeyComparator(cmp);
- // if we reach this stage then we need to restart from the (possibly
- // new) root
- if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
- ctx.pageLsns.removeLast(); // pop the restart op indicator
- continue;
- }
+ boolean repeatOp = true;
+ // we use this loop to deal with possibly multiple operation restarts
+ // due to ongoing structure modifications during the descent
+ while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
+ performOp(rootPage, null, ctx);
- repeatOp = false;
- }
+ // if we reach this stage then we need to restart from the (possibly
+ // new) root
+ if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
+ ctx.pageLsns.removeLast(); // pop the restart op indicator
+ continue;
+ }
- cursor.setBufferCache(bufferCache);
- cursor.setFileId(fileId);
- }
+ repeatOp = false;
+ }
- private void unsetSmPages(BTreeOpContext ctx) throws HyracksDataException {
- ICachedPage originalPage = ctx.interiorFrame.getPage();
- for (int i = 0; i < ctx.smPages.size(); i++) {
- int pageId = ctx.smPages.get(i);
- ICachedPage smPage = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, pageId), false);
- pins++;
- smPage.acquireWriteLatch(); // TODO: would like to set page dirty
- // without latching
- writeLatchesAcquired++;
- try {
- ctx.interiorFrame.setPage(smPage);
- ctx.interiorFrame.setSmFlag(false);
- } finally {
- smPage.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(smPage);
- unpins++;
- }
- }
- if (ctx.smPages.size() > 0) {
- treeLatch.writeLock().unlock();
- treeLatchesReleased++;
- ctx.smPages.clear();
- }
- ctx.interiorFrame.setPage(originalPage);
- }
+ cursor.setBufferCache(bufferCache);
+ cursor.setFileId(fileId);
+ }
- private void createNewRoot(BTreeOpContext ctx) throws Exception {
- rootSplits++; // debug
- splitsByLevel[currentLevel]++;
- currentLevel++;
+ private void unsetSmPages(BTreeOpContext ctx) throws HyracksDataException {
+ ICachedPage originalPage = ctx.interiorFrame.getPage();
+ for (int i = 0; i < ctx.smPages.size(); i++) {
+ int pageId = ctx.smPages.get(i);
+ ICachedPage smPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ pins++;
+ smPage.acquireWriteLatch(); // TODO: would like to set page dirty
+ // without latching
+ writeLatchesAcquired++;
+ try {
+ ctx.interiorFrame.setPage(smPage);
+ ctx.interiorFrame.setSmFlag(false);
+ } finally {
+ smPage.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(smPage);
+ unpins++;
+ }
+ }
+ if (ctx.smPages.size() > 0) {
+ treeLatch.writeLock().unlock();
+ treeLatchesReleased++;
+ ctx.smPages.clear();
+ }
+ ctx.interiorFrame.setPage(originalPage);
+ }
- // make sure the root is always at the same level
- ICachedPage leftNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, ctx.splitKey.getLeftPage()), false);
- pins++;
- leftNode.acquireWriteLatch(); // TODO: think about whether latching is
- // really required
- writeLatchesAcquired++;
- try {
- ICachedPage rightNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, ctx.splitKey.getRightPage()), false);
- pins++;
- rightNode.acquireWriteLatch(); // TODO: think about whether latching
- // is really required
- writeLatchesAcquired++;
- try {
- int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
- ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, newLeftId), true);
- pins++;
- newLeftNode.acquireWriteLatch(); // TODO: think about whether
- // latching is really
- // required
- writeLatchesAcquired++;
- try {
- // copy left child to new left child
- System.arraycopy(leftNode.getBuffer().array(), 0,
- newLeftNode.getBuffer().array(), 0, newLeftNode
- .getBuffer().capacity());
- ctx.interiorFrame.setPage(newLeftNode);
- ctx.interiorFrame.setSmFlag(false);
+ private void createNewRoot(BTreeOpContext ctx) throws Exception {
+ rootSplits++; // debug
+ splitsByLevel[currentLevel]++;
+ currentLevel++;
- // change sibling pointer if children are leaves
- ctx.leafFrame.setPage(rightNode);
- if (ctx.leafFrame.isLeaf()) {
- ctx.leafFrame.setPrevLeaf(newLeftId);
- }
+ // make sure the root is always at the same level
+ ICachedPage leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getLeftPage()),
+ false);
+ pins++;
+ leftNode.acquireWriteLatch(); // TODO: think about whether latching is
+ // really required
+ writeLatchesAcquired++;
+ try {
+ ICachedPage rightNode = bufferCache.pin(
+ BufferedFileHandle.getDiskPageId(fileId, ctx.splitKey.getRightPage()), false);
+ pins++;
+ rightNode.acquireWriteLatch(); // TODO: think about whether latching
+ // is really required
+ writeLatchesAcquired++;
+ try {
+ int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
+ ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId), true);
+ pins++;
+ newLeftNode.acquireWriteLatch(); // TODO: think about whether
+ // latching is really
+ // required
+ writeLatchesAcquired++;
+ try {
+ // copy left child to new left child
+ System.arraycopy(leftNode.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0, newLeftNode
+ .getBuffer().capacity());
+ ctx.interiorFrame.setPage(newLeftNode);
+ ctx.interiorFrame.setSmFlag(false);
- // initialize new root (leftNode becomes new root)
- ctx.interiorFrame.setPage(leftNode);
- ctx.interiorFrame.initBuffer((byte) (ctx.leafFrame
- .getLevel() + 1));
- ctx.interiorFrame.setSmFlag(true); // will be cleared later
- // in unsetSmPages
- ctx.splitKey.setLeftPage(newLeftId);
- int targetTupleIndex = ctx.interiorFrame.findTupleIndex(
- ctx.splitKey.getTuple(), cmp);
- ctx.interiorFrame.insert(ctx.splitKey.getTuple(), cmp,
- targetTupleIndex);
- } finally {
- newLeftNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(newLeftNode);
- unpins++;
- }
- } finally {
- rightNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(rightNode);
- unpins++;
- }
- } finally {
- leftNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(leftNode);
- unpins++;
- }
- }
+ // change sibling pointer if children are leaves
+ ctx.leafFrame.setPage(rightNode);
+ if (ctx.leafFrame.isLeaf()) {
+ ctx.leafFrame.setPrevLeaf(newLeftId);
+ }
- @Override
- public void insert(ITupleReference tuple, IndexOpContext ictx)
- throws Exception {
- BTreeOpContext ctx = (BTreeOpContext) ictx;
- ctx.reset();
- ctx.pred.setLowKeyComparator(cmp);
- ctx.pred.setHighKeyComparator(cmp);
- ctx.pred.setLowKey(tuple, true);
- ctx.pred.setHighKey(tuple, true);
- ctx.splitKey.reset();
- ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
+ // initialize new root (leftNode becomes new root)
+ ctx.interiorFrame.setPage(leftNode);
+ ctx.interiorFrame.initBuffer((byte) (ctx.leafFrame.getLevel() + 1));
+ ctx.interiorFrame.setSmFlag(true); // will be cleared later
+ // in unsetSmPages
+ ctx.splitKey.setLeftPage(newLeftId);
+ int targetTupleIndex = ctx.interiorFrame.findTupleIndex(ctx.splitKey.getTuple(), cmp);
+ ctx.interiorFrame.insert(ctx.splitKey.getTuple(), cmp, targetTupleIndex);
+ } finally {
+ newLeftNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(newLeftNode);
+ unpins++;
+ }
+ } finally {
+ rightNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(rightNode);
+ unpins++;
+ }
+ } finally {
+ leftNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(leftNode);
+ unpins++;
+ }
+ }
- boolean repeatOp = true;
- // we use this loop to deal with possibly multiple operation restarts
- // due to ongoing structure modifications during the descent
- while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
- performOp(rootPage, null, ctx);
+ @Override
+ public void insert(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ BTreeOpContext ctx = (BTreeOpContext) ictx;
+ ctx.reset();
+ ctx.pred.setLowKeyComparator(cmp);
+ ctx.pred.setHighKeyComparator(cmp);
+ ctx.pred.setLowKey(tuple, true);
+ ctx.pred.setHighKey(tuple, true);
+ ctx.splitKey.reset();
+ ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
- // if we reach this stage then we need to restart from the (possibly
- // new) root
- if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
- ctx.pageLsns.removeLast(); // pop the restart op indicator
- continue;
- }
+ boolean repeatOp = true;
+ // we use this loop to deal with possibly multiple operation restarts
+ // due to ongoing structure modifications during the descent
+ while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
+ performOp(rootPage, null, ctx);
- // we split the root, here is the key for a new root
- if (ctx.splitKey.getBuffer() != null) {
- createNewRoot(ctx);
- }
+ // if we reach this stage then we need to restart from the (possibly
+ // new) root
+ if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
+ ctx.pageLsns.removeLast(); // pop the restart op indicator
+ continue;
+ }
- unsetSmPages(ctx);
+ // we split the root, here is the key for a new root
+ if (ctx.splitKey.getBuffer() != null) {
+ createNewRoot(ctx);
+ }
- repeatOp = false;
- }
- }
+ unsetSmPages(ctx);
- public long uselessCompressionTime = 0;
+ repeatOp = false;
+ }
+ }
- private void insertLeaf(ICachedPage node, int pageId,
- ITupleReference tuple, BTreeOpContext ctx) throws Exception {
- ctx.leafFrame.setPage(node);
- ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
+ public long uselessCompressionTime = 0;
- int targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp);
- FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple,
- cmp);
- switch (spaceStatus) {
+ private void insertLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+ ctx.leafFrame.setPage(node);
+ ctx.leafFrame.setPageTupleFieldCount(cmp.getFieldCount());
- case SUFFICIENT_CONTIGUOUS_SPACE: {
- // System.out.println("SUFFICIENT_CONTIGUOUS_SPACE");
- ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
- ctx.splitKey.reset();
- }
- break;
+ int targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp);
+ FrameOpSpaceStatus spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
+ switch (spaceStatus) {
- case SUFFICIENT_SPACE: {
- // System.out.println("SUFFICIENT_SPACE");
- boolean slotsChanged = ctx.leafFrame.compact(cmp);
- if (slotsChanged)
- targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp);
- ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
- ctx.splitKey.reset();
- }
- break;
+ case SUFFICIENT_CONTIGUOUS_SPACE: {
+ // System.out.println("SUFFICIENT_CONTIGUOUS_SPACE");
+ ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
+ ctx.splitKey.reset();
+ }
+ break;
- case INSUFFICIENT_SPACE: {
- // System.out.println("INSUFFICIENT_SPACE");
+ case SUFFICIENT_SPACE: {
+ // System.out.println("SUFFICIENT_SPACE");
+ boolean slotsChanged = ctx.leafFrame.compact(cmp);
+ if (slotsChanged)
+ targetTupleIndex = ctx.leafFrame.findTupleIndex(tuple, cmp);
+ ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
+ ctx.splitKey.reset();
+ }
+ break;
- // try compressing the page first and see if there is space
- // available
- long start = System.currentTimeMillis();
- boolean reCompressed = ctx.leafFrame.compress(cmp);
- long end = System.currentTimeMillis();
- if (reCompressed)
- spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
+ case INSUFFICIENT_SPACE: {
+ // System.out.println("INSUFFICIENT_SPACE");
- if (spaceStatus == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
- ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
- ctx.splitKey.reset();
+ // try compressing the page first and see if there is space
+ // available
+ long start = System.currentTimeMillis();
+ boolean reCompressed = ctx.leafFrame.compress(cmp);
+ long end = System.currentTimeMillis();
+ if (reCompressed)
+ spaceStatus = ctx.leafFrame.hasSpaceInsert(tuple, cmp);
- usefulCompression++;
- } else {
+ if (spaceStatus == FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE) {
+ ctx.leafFrame.insert(tuple, cmp, targetTupleIndex);
+ ctx.splitKey.reset();
- uselessCompressionTime += (end - start);
- uselessCompression++;
+ usefulCompression++;
+ } else {
- // perform split
- splitsByLevel[0]++; // debug
- int rightSiblingPageId = ctx.leafFrame.getNextLeaf();
- ICachedPage rightSibling = null;
- if (rightSiblingPageId > 0) {
- rightSibling = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, rightSiblingPageId), false);
- pins++;
- }
+ uselessCompressionTime += (end - start);
+ uselessCompression++;
- treeLatch.writeLock().lock(); // lock is released in
- // unsetSmPages(), after sm has
- // fully completed
- treeLatchesAcquired++;
- try {
+ // perform split
+ splitsByLevel[0]++; // debug
+ int rightSiblingPageId = ctx.leafFrame.getNextLeaf();
+ ICachedPage rightSibling = null;
+ if (rightSiblingPageId > 0) {
+ rightSibling = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightSiblingPageId),
+ false);
+ pins++;
+ }
- int rightPageId = freePageManager
- .getFreePage(ctx.metaFrame);
- ICachedPage rightNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, rightPageId), true);
- pins++;
- rightNode.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- IBTreeLeafFrame rightFrame = (IBTreeLeafFrame)leafFrameFactory
- .createFrame();
- rightFrame.setPage(rightNode);
- rightFrame.initBuffer((byte) 0);
- rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
+ treeLatch.writeLock().lock(); // lock is released in
+ // unsetSmPages(), after sm has
+ // fully completed
+ treeLatchesAcquired++;
+ try {
- int ret = ctx.leafFrame.split(rightFrame, tuple, cmp,
- ctx.splitKey);
+ int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
+ ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId),
+ true);
+ pins++;
+ rightNode.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ IBTreeLeafFrame rightFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ rightFrame.setPage(rightNode);
+ rightFrame.initBuffer((byte) 0);
+ rightFrame.setPageTupleFieldCount(cmp.getFieldCount());
- ctx.smPages.add(pageId);
- ctx.smPages.add(rightPageId);
- ctx.leafFrame.setSmFlag(true);
- rightFrame.setSmFlag(true);
+ int ret = ctx.leafFrame.split(rightFrame, tuple, cmp, ctx.splitKey);
- rightFrame.setNextLeaf(ctx.leafFrame.getNextLeaf());
- rightFrame.setPrevLeaf(pageId);
- ctx.leafFrame.setNextLeaf(rightPageId);
+ ctx.smPages.add(pageId);
+ ctx.smPages.add(rightPageId);
+ ctx.leafFrame.setSmFlag(true);
+ rightFrame.setSmFlag(true);
- // TODO: we just use increasing numbers as pageLsn,
- // we
- // should tie this together with the LogManager and
- // TransactionManager
- rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
- ctx.leafFrame
- .setPageLsn(ctx.leafFrame.getPageLsn() + 1);
+ rightFrame.setNextLeaf(ctx.leafFrame.getNextLeaf());
+ rightFrame.setPrevLeaf(pageId);
+ ctx.leafFrame.setNextLeaf(rightPageId);
- if (ret != 0) {
- ctx.splitKey.reset();
- } else {
- // System.out.print("LEAF SPLITKEY: ");
- // cmp.printKey(splitKey.getData(), 0);
- // System.out.println("");
+ // TODO: we just use increasing numbers as pageLsn,
+ // we
+ // should tie this together with the LogManager and
+ // TransactionManager
+ rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
+ ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
- ctx.splitKey.setPages(pageId, rightPageId);
- }
- if (rightSibling != null) {
- rightSibling.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- rightFrame.setPage(rightSibling); // reuse
- // rightFrame
- // for
- // modification
- rightFrame.setPrevLeaf(rightPageId);
- } finally {
- rightSibling.releaseWriteLatch();
- writeLatchesReleased++;
- }
- }
- } finally {
- rightNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(rightNode);
- unpins++;
- }
- } catch (Exception e) {
- treeLatch.writeLock().unlock();
- treeLatchesReleased++;
- throw e;
- } finally {
- if (rightSibling != null) {
- bufferCache.unpin(rightSibling);
- unpins++;
- }
- }
- }
- }
- break;
+ if (ret != 0) {
+ ctx.splitKey.reset();
+ } else {
+ // System.out.print("LEAF SPLITKEY: ");
+ // cmp.printKey(splitKey.getData(), 0);
+ // System.out.println("");
- }
+ ctx.splitKey.setPages(pageId, rightPageId);
+ }
+ if (rightSibling != null) {
+ rightSibling.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ rightFrame.setPage(rightSibling); // reuse
+ // rightFrame
+ // for
+ // modification
+ rightFrame.setPrevLeaf(rightPageId);
+ } finally {
+ rightSibling.releaseWriteLatch();
+ writeLatchesReleased++;
+ }
+ }
+ } finally {
+ rightNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(rightNode);
+ unpins++;
+ }
+ } catch (Exception e) {
+ treeLatch.writeLock().unlock();
+ treeLatchesReleased++;
+ throw e;
+ } finally {
+ if (rightSibling != null) {
+ bufferCache.unpin(rightSibling);
+ unpins++;
+ }
+ }
+ }
+ }
+ break;
- node.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(node);
- unpins++;
- }
+ }
- private void insertInterior(ICachedPage node, int pageId,
- ITupleReference tuple, BTreeOpContext ctx) throws Exception {
- ctx.interiorFrame.setPage(node);
- ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+ node.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(node);
+ unpins++;
+ }
- int targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp);
- FrameOpSpaceStatus spaceStatus = ctx.interiorFrame.hasSpaceInsert(
- tuple, cmp);
- switch (spaceStatus) {
- case INSUFFICIENT_SPACE: {
- splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
- int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
- ICachedPage rightNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, rightPageId), true);
- pins++;
- rightNode.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- ITreeIndexFrame rightFrame = interiorFrameFactory.createFrame();
- rightFrame.setPage(rightNode);
- rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
- rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
- // instead of creating a new split key, use the existing
- // splitKey
- int ret = ctx.interiorFrame.split(rightFrame, ctx.splitKey
- .getTuple(), cmp, ctx.splitKey);
+ private void insertInterior(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx)
+ throws Exception {
+ ctx.interiorFrame.setPage(node);
+ ctx.interiorFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
- ctx.smPages.add(pageId);
- ctx.smPages.add(rightPageId);
- ctx.interiorFrame.setSmFlag(true);
- rightFrame.setSmFlag(true);
+ int targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp);
+ FrameOpSpaceStatus spaceStatus = ctx.interiorFrame.hasSpaceInsert(tuple, cmp);
+ switch (spaceStatus) {
+ case INSUFFICIENT_SPACE: {
+ splitsByLevel[ctx.interiorFrame.getLevel()]++; // debug
+ int rightPageId = freePageManager.getFreePage(ctx.metaFrame);
+ ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
+ pins++;
+ rightNode.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ ITreeIndexFrame rightFrame = interiorFrameFactory.createFrame();
+ rightFrame.setPage(rightNode);
+ rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
+ rightFrame.setPageTupleFieldCount(cmp.getKeyFieldCount());
+ // instead of creating a new split key, use the existing
+ // splitKey
+ int ret = ctx.interiorFrame.split(rightFrame, ctx.splitKey.getTuple(), cmp, ctx.splitKey);
- // TODO: we just use increasing numbers as pageLsn, we
- // should tie this together with the LogManager and
- // TransactionManager
- rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
- ctx.interiorFrame
- .setPageLsn(ctx.interiorFrame.getPageLsn() + 1);
+ ctx.smPages.add(pageId);
+ ctx.smPages.add(rightPageId);
+ ctx.interiorFrame.setSmFlag(true);
+ rightFrame.setSmFlag(true);
- if (ret != 0) {
- ctx.splitKey.reset();
- } else {
- // System.out.print("INTERIOR SPLITKEY: ");
- // cmp.printKey(splitKey.getData(), 0);
- // System.out.println("");
+ // TODO: we just use increasing numbers as pageLsn, we
+ // should tie this together with the LogManager and
+ // TransactionManager
+ rightFrame.setPageLsn(rightFrame.getPageLsn() + 1);
+ ctx.interiorFrame.setPageLsn(ctx.interiorFrame.getPageLsn() + 1);
- ctx.splitKey.setPages(pageId, rightPageId);
- }
- } finally {
- rightNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(rightNode);
- unpins++;
- }
- }
- break;
+ if (ret != 0) {
+ ctx.splitKey.reset();
+ } else {
+ // System.out.print("INTERIOR SPLITKEY: ");
+ // cmp.printKey(splitKey.getData(), 0);
+ // System.out.println("");
- case SUFFICIENT_CONTIGUOUS_SPACE: {
- // System.out.println("INSERT INTERIOR: " + pageId);
- ctx.interiorFrame.insert(tuple, cmp, targetTupleIndex);
- ctx.splitKey.reset();
- }
- break;
+ ctx.splitKey.setPages(pageId, rightPageId);
+ }
+ } finally {
+ rightNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(rightNode);
+ unpins++;
+ }
+ }
+ break;
- case SUFFICIENT_SPACE: {
- boolean slotsChanged = ctx.interiorFrame.compact(cmp);
- if (slotsChanged)
- targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp);
- ctx.interiorFrame.insert(tuple, cmp, targetTupleIndex);
- ctx.splitKey.reset();
- }
- break;
+ case SUFFICIENT_CONTIGUOUS_SPACE: {
+ // System.out.println("INSERT INTERIOR: " + pageId);
+ ctx.interiorFrame.insert(tuple, cmp, targetTupleIndex);
+ ctx.splitKey.reset();
+ }
+ break;
- }
- }
+ case SUFFICIENT_SPACE: {
+ boolean slotsChanged = ctx.interiorFrame.compact(cmp);
+ if (slotsChanged)
+ targetTupleIndex = ctx.interiorFrame.findTupleIndex(tuple, cmp);
+ ctx.interiorFrame.insert(tuple, cmp, targetTupleIndex);
+ ctx.splitKey.reset();
+ }
+ break;
- @Override
- public void delete(ITupleReference tuple, IndexOpContext ictx)
- throws Exception {
- BTreeOpContext ctx = (BTreeOpContext)ictx;
- ctx.reset();
- ctx.pred.setLowKeyComparator(cmp);
- ctx.pred.setHighKeyComparator(cmp);
- ctx.pred.setLowKey(tuple, true);
- ctx.pred.setHighKey(tuple, true);
- ctx.splitKey.reset();
- ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
+ }
+ }
- boolean repeatOp = true;
- // we use this loop to deal with possibly multiple operation restarts
- // due to ongoing structure modifications during the descent
- while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
- performOp(rootPage, null, ctx);
+ @Override
+ public void delete(ITupleReference tuple, IndexOpContext ictx) throws Exception {
+ BTreeOpContext ctx = (BTreeOpContext) ictx;
+ ctx.reset();
+ ctx.pred.setLowKeyComparator(cmp);
+ ctx.pred.setHighKeyComparator(cmp);
+ ctx.pred.setLowKey(tuple, true);
+ ctx.pred.setHighKey(tuple, true);
+ ctx.splitKey.reset();
+ ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
- // if we reach this stage then we need to restart from the (possibly
- // new) root
- if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
- ctx.pageLsns.removeLast(); // pop the restart op indicator
- continue;
- }
+ boolean repeatOp = true;
+ // we use this loop to deal with possibly multiple operation restarts
+ // due to ongoing structure modifications during the descent
+ while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
+ performOp(rootPage, null, ctx);
+
+ // if we reach this stage then we need to restart from the (possibly
+ // new) root
+ if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
+ ctx.pageLsns.removeLast(); // pop the restart op indicator
+ continue;
+ }
+
+ // tree is empty, reset level to zero
+ if (ctx.splitKey.getBuffer() != null) {
+ ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), false);
+ pins++;
+ rootNode.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ ctx.leafFrame.setPage(rootNode);
+ ctx.leafFrame.initBuffer((byte) 0);
+ currentLevel = 0; // debug
+ } finally {
+ rootNode.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(rootNode);
+ unpins++;
+ }
+ }
+
+ unsetSmPages(ctx);
+
+ addFreePages(ctx);
+
+ repeatOp = false;
+ }
+ }
+
+ // TODO: to avoid latch deadlock, must modify cursor to detect empty leaves
+ private void deleteLeaf(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx) throws Exception {
+ ctx.leafFrame.setPage(node);
- // tree is empty, reset level to zero
- if (ctx.splitKey.getBuffer() != null) {
- ICachedPage rootNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, rootPage), false);
- pins++;
- rootNode.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- ctx.leafFrame.setPage(rootNode);
- ctx.leafFrame.initBuffer((byte) 0);
- currentLevel = 0; // debug
- } finally {
- rootNode.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(rootNode);
- unpins++;
- }
- }
+ // will this leaf become empty?
+ if (ctx.leafFrame.getTupleCount() == 1) {
+ IBTreeLeafFrame siblingFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
- unsetSmPages(ctx);
+ ICachedPage leftNode = null;
+ ICachedPage rightNode = null;
+ int nextLeaf = ctx.leafFrame.getNextLeaf();
+ int prevLeaf = ctx.leafFrame.getPrevLeaf();
- addFreePages(ctx);
+ if (prevLeaf > 0)
+ leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, prevLeaf), false);
- repeatOp = false;
- }
- }
+ try {
- // TODO: to avoid latch deadlock, must modify cursor to detect empty leaves
- private void deleteLeaf(ICachedPage node, int pageId,
- ITupleReference tuple, BTreeOpContext ctx) throws Exception {
- ctx.leafFrame.setPage(node);
+ if (nextLeaf > 0)
+ rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextLeaf), false);
- // will this leaf become empty?
- if (ctx.leafFrame.getTupleCount() == 1) {
- IBTreeLeafFrame siblingFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
+ try {
+ treeLatch.writeLock().lock();
+ treeLatchesAcquired++;
- ICachedPage leftNode = null;
- ICachedPage rightNode = null;
- int nextLeaf = ctx.leafFrame.getNextLeaf();
- int prevLeaf = ctx.leafFrame.getPrevLeaf();
+ try {
+ ctx.leafFrame.delete(tuple, cmp, true);
+ // to propagate the deletion we only need to make the
+ // splitKey != null
+ // we can reuse data to identify which key to delete in
+ // the parent
+ ctx.splitKey.initData(1);
+ } catch (Exception e) {
+ // don't propagate deletion upwards if deletion at this
+ // level fails
+ ctx.splitKey.reset();
+ throw e;
+ }
- if (prevLeaf > 0)
- leftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(
- fileId, prevLeaf), false);
+ // TODO: tie together with logging
+ ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
+ ctx.leafFrame.setLevel(freePageManager.getFreePageLevelIndicator());
- try {
+ ctx.smPages.add(pageId);
+ ctx.leafFrame.setSmFlag(true);
- if (nextLeaf > 0)
- rightNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, nextLeaf), false);
+ node.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(node);
+ unpins++;
- try {
- treeLatch.writeLock().lock();
- treeLatchesAcquired++;
+ if (leftNode != null) {
+ leftNode.acquireWriteLatch();
+ try {
+ siblingFrame.setPage(leftNode);
+ siblingFrame.setNextLeaf(nextLeaf);
+ siblingFrame.setPageLsn(siblingFrame.getPageLsn() + 1); // TODO:
+ // tie
+ // together
+ // with
+ // logging
+ } finally {
+ leftNode.releaseWriteLatch();
+ }
+ }
- try {
- ctx.leafFrame.delete(tuple, cmp, true);
- // to propagate the deletion we only need to make the
- // splitKey != null
- // we can reuse data to identify which key to delete in
- // the parent
- ctx.splitKey.initData(1);
- } catch (Exception e) {
- // don't propagate deletion upwards if deletion at this
- // level fails
- ctx.splitKey.reset();
- throw e;
- }
+ if (rightNode != null) {
+ rightNode.acquireWriteLatch();
+ try {
+ siblingFrame.setPage(rightNode);
+ siblingFrame.setPrevLeaf(prevLeaf);
+ siblingFrame.setPageLsn(siblingFrame.getPageLsn() + 1); // TODO:
+ // tie
+ // together
+ // with
+ // logging
+ } finally {
+ rightNode.releaseWriteLatch();
+ }
+ }
- // TODO: tie together with loggins
- ctx.leafFrame.setPageLsn(ctx.leafFrame.getPageLsn() + 1);
- ctx.leafFrame.setLevel(freePageManager
- .getFreePageLevelIndicator());
+ // register pageId as a free page
+ ctx.freePages.add(pageId);
- ctx.smPages.add(pageId);
- ctx.leafFrame.setSmFlag(true);
+ } catch (Exception e) {
+ treeLatch.writeLock().unlock();
+ treeLatchesReleased++;
+ throw e;
+ } finally {
+ if (rightNode != null) {
+ bufferCache.unpin(rightNode);
+ }
+ }
+ } finally {
+ if (leftNode != null) {
+ bufferCache.unpin(leftNode);
+ }
+ }
+ } else { // leaf will not become empty
+ ctx.leafFrame.delete(tuple, cmp, true);
+ node.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(node);
+ unpins++;
+ }
+ }
- node.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(node);
- unpins++;
+ private void deleteInterior(ICachedPage node, int pageId, ITupleReference tuple, BTreeOpContext ctx)
+ throws Exception {
+ ctx.interiorFrame.setPage(node);
- if (leftNode != null) {
- leftNode.acquireWriteLatch();
- try {
- siblingFrame.setPage(leftNode);
- siblingFrame.setNextLeaf(nextLeaf);
- siblingFrame
- .setPageLsn(siblingFrame.getPageLsn() + 1); // TODO:
- // tie
- // together
- // with
- // logging
- } finally {
- leftNode.releaseWriteLatch();
- }
- }
+ // this means there is only a child pointer but no key, this case
+ // propagates the split
+ if (ctx.interiorFrame.getTupleCount() == 0) {
+ ctx.interiorFrame.setPageLsn(ctx.interiorFrame.getPageLsn() + 1); // TODO:
+ // tie
+ // together
+ // with
+ // logging
+ ctx.leafFrame.setLevel(freePageManager.getFreePageLevelIndicator());
+ ctx.smPages.add(pageId);
+ ctx.interiorFrame.setSmFlag(true);
+ ctx.interiorFrame.setRightmostChildPageId(-1); // this node is
+ // completely empty
+ // register this pageId as a free page
+ ctx.freePages.add(pageId);
- if (rightNode != null) {
- rightNode.acquireWriteLatch();
- try {
- siblingFrame.setPage(rightNode);
- siblingFrame.setPrevLeaf(prevLeaf);
- siblingFrame
- .setPageLsn(siblingFrame.getPageLsn() + 1); // TODO:
- // tie
- // together
- // with
- // logging
- } finally {
- rightNode.releaseWriteLatch();
- }
- }
+ } else {
+ ctx.interiorFrame.delete(tuple, cmp, false);
+ ctx.interiorFrame.setPageLsn(ctx.interiorFrame.getPageLsn() + 1); // TODO:
+ // tie
+ // together
+ // with
+ // logging
+ ctx.splitKey.reset(); // don't propagate deletion
+ }
+ }
- // register pageId as a free
- ctx.freePages.add(pageId);
+ private final void acquireLatch(ICachedPage node, IndexOp op, boolean isLeaf) {
+ if (isLeaf && (op.equals(IndexOp.INSERT) || op.equals(IndexOp.DELETE))) {
+ node.acquireWriteLatch();
+ writeLatchesAcquired++;
+ } else {
+ node.acquireReadLatch();
+ readLatchesAcquired++;
+ }
+ }
- } catch (Exception e) {
- treeLatch.writeLock().unlock();
- treeLatchesReleased++;
- throw e;
- } finally {
- if (rightNode != null) {
- bufferCache.unpin(rightNode);
- }
- }
- } finally {
- if (leftNode != null) {
- bufferCache.unpin(leftNode);
- }
- }
- } else { // leaf will not become empty
- ctx.leafFrame.delete(tuple, cmp, true);
- node.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(node);
- unpins++;
- }
- }
+ private final void releaseLatch(ICachedPage node, IndexOp op, boolean isLeaf) {
+ if (isLeaf && (op.equals(IndexOp.INSERT) || op.equals(IndexOp.DELETE))) {
+ node.releaseWriteLatch();
+ writeLatchesReleased++;
+ } else {
+ node.releaseReadLatch();
+ readLatchesReleased++;
+ }
+ }
- private void deleteInterior(ICachedPage node, int pageId,
- ITupleReference tuple, BTreeOpContext ctx) throws Exception {
- ctx.interiorFrame.setPage(node);
+ private boolean isConsistent(int pageId, BTreeOpContext ctx) throws Exception {
+ ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ pins++;
+ node.acquireReadLatch();
+ readLatchesAcquired++;
+ ctx.interiorFrame.setPage(node);
+ boolean isConsistent = false;
+ try {
+ isConsistent = ctx.pageLsns.getLast() == ctx.interiorFrame.getPageLsn();
+ } finally {
+ node.releaseReadLatch();
+ readLatchesReleased++;
+ bufferCache.unpin(node);
+ unpins++;
+ }
+ return isConsistent;
+ }
- // this means there is only a child pointer but no key, this case
- // propagates the split
- if (ctx.interiorFrame.getTupleCount() == 0) {
- ctx.interiorFrame.setPageLsn(ctx.interiorFrame.getPageLsn() + 1); // TODO:
- // tie
- // together
- // with
- // logging
- ctx.leafFrame.setLevel(freePageManager.getFreePageLevelIndicator());
- ctx.smPages.add(pageId);
- ctx.interiorFrame.setSmFlag(true);
- ctx.interiorFrame.setRightmostChildPageId(-1); // this node is
- // completely empty
- // register this pageId as a free page
- ctx.freePages.add(pageId);
+ private void performOp(int pageId, ICachedPage parent, BTreeOpContext ctx) throws Exception {
+ ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ pins++;
- } else {
- ctx.interiorFrame.delete(tuple, cmp, false);
- ctx.interiorFrame.setPageLsn(ctx.interiorFrame.getPageLsn() + 1); // TODO:
- // tie
- // together
- // with
- // logging
- ctx.splitKey.reset(); // don't propagate deletion
- }
- }
+ ctx.interiorFrame.setPage(node);
+ // this check performs an unprotected read in the page
+ // the following could happen: TODO fill out
+ boolean unsafeIsLeaf = ctx.interiorFrame.isLeaf();
+ acquireLatch(node, ctx.op, unsafeIsLeaf);
+ boolean smFlag = ctx.interiorFrame.getSmFlag();
+ // re-check leafness after latching
+ boolean isLeaf = ctx.interiorFrame.isLeaf();
- private final void acquireLatch(ICachedPage node, IndexOp op,
- boolean isLeaf) {
- if (isLeaf
- && (op.equals(IndexOp.INSERT) || op
- .equals(IndexOp.DELETE))) {
- node.acquireWriteLatch();
- writeLatchesAcquired++;
- } else {
- node.acquireReadLatch();
- readLatchesAcquired++;
- }
- }
+ // remember trail of pageLsns, to unwind recursion in case of an ongoing
+ // structure modification
+ ctx.pageLsns.add(ctx.interiorFrame.getPageLsn());
- private final void releaseLatch(ICachedPage node, IndexOp op,
- boolean isLeaf) {
- if (isLeaf
- && (op.equals(IndexOp.INSERT) || op
- .equals(IndexOp.DELETE))) {
- node.releaseWriteLatch();
- writeLatchesReleased++;
- } else {
- node.releaseReadLatch();
- readLatchesReleased++;
- }
- }
+ try {
- private boolean isConsistent(int pageId, BTreeOpContext ctx)
- throws Exception {
- ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(
- fileId, pageId), false);
- pins++;
- node.acquireReadLatch();
- readLatchesAcquired++;
- ctx.interiorFrame.setPage(node);
- boolean isConsistent = false;
- try {
- isConsistent = ctx.pageLsns.getLast() == ctx.interiorFrame
- .getPageLsn();
- } finally {
- node.releaseReadLatch();
- readLatchesReleased++;
- bufferCache.unpin(node);
- unpins++;
- }
- return isConsistent;
- }
+ // latch coupling, note: parent should never be write latched,
+ // otherwise something is wrong.
+ if (parent != null) {
+ parent.releaseReadLatch();
+ readLatchesReleased++;
+ bufferCache.unpin(parent);
+ unpins++;
+ }
- private void performOp(int pageId, ICachedPage parent, BTreeOpContext ctx)
- throws Exception {
- ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(
- fileId, pageId), false);
- pins++;
+ if (!isLeaf || smFlag) {
+ if (!smFlag) {
+ // we use this loop to deal with possibly multiple operation
+ // restarts due to ongoing structure modifications during
+ // the descent
+ boolean repeatOp = true;
+ while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
+ int childPageId = ctx.interiorFrame.getChildPageId(ctx.pred, cmp);
+ performOp(childPageId, node, ctx);
- ctx.interiorFrame.setPage(node);
- // this check performs an unprotected read in the page
- // the following could happen: TODO fill out
- boolean unsafeIsLeaf = ctx.interiorFrame.isLeaf();
- acquireLatch(node, ctx.op, unsafeIsLeaf);
- boolean smFlag = ctx.interiorFrame.getSmFlag();
- // re-check leafness after latching
- boolean isLeaf = ctx.interiorFrame.isLeaf();
+ if (!ctx.pageLsns.isEmpty() && ctx.pageLsns.getLast() == RESTART_OP) {
+ ctx.pageLsns.removeLast(); // pop the restart op
+ // indicator
+ if (isConsistent(pageId, ctx)) {
+ node = null; // to avoid unpinning and
+ // unlatching node again in
+ // recursive call
+ continue; // descend the tree again
+ } else {
+ ctx.pageLsns.removeLast(); // pop pageLsn of
+ // this page
+ // (version seen by this op
+ // during descent)
+ ctx.pageLsns.add(RESTART_OP); // this node is
+ // not
+ // consistent,
+ // set the
+ // restart
+ // indicator for
+ // upper level
+ break;
+ }
+ }
- // remember trail of pageLsns, to unwind recursion in case of an ongoing
- // structure modification
- ctx.pageLsns.add(ctx.interiorFrame.getPageLsn());
+ switch (ctx.op) {
- try {
+ case INSERT: {
+ if (ctx.splitKey.getBuffer() != null) {
+ node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ pins++;
+ node.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ insertInterior(node, pageId, ctx.splitKey.getTuple(), ctx);
+ } finally {
+ node.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(node);
+ unpins++;
+ }
+ } else {
+ unsetSmPages(ctx);
+ }
+ }
+ break;
- // latch coupling, note: parent should never be write latched,
- // otherwise something is wrong.
- if (parent != null) {
- parent.releaseReadLatch();
- readLatchesReleased++;
- bufferCache.unpin(parent);
- unpins++;
- }
+ case DELETE: {
+ if (ctx.splitKey.getBuffer() != null) {
+ node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ pins++;
+ node.acquireWriteLatch();
+ writeLatchesAcquired++;
+ try {
+ deleteInterior(node, pageId, ctx.pred.getLowKey(), ctx);
+ } finally {
+ node.releaseWriteLatch();
+ writeLatchesReleased++;
+ bufferCache.unpin(node);
+ unpins++;
+ }
+ } else {
+ unsetSmPages(ctx);
+ }
+ }
+ break;
- if (!isLeaf || smFlag) {
- if (!smFlag) {
- // we use this loop to deal with possibly multiple operation
- // restarts due to ongoing structure modifications during
- // the descent
- boolean repeatOp = true;
- while (repeatOp && ctx.opRestarts < MAX_RESTARTS) {
- int childPageId = ctx.interiorFrame.getChildPageId(
- ctx.pred, cmp);
- performOp(childPageId, node, ctx);
+ case SEARCH: {
+ // do nothing
+ }
+ break;
- if (!ctx.pageLsns.isEmpty()
- && ctx.pageLsns.getLast() == RESTART_OP) {
- ctx.pageLsns.removeLast(); // pop the restart op
- // indicator
- if (isConsistent(pageId, ctx)) {
- node = null; // to avoid unpinning and
- // unlatching node again in
- // recursive call
- continue; // descend the tree again
- } else {
- ctx.pageLsns.removeLast(); // pop pageLsn of
- // this page
- // (version seen by this op
- // during descent)
- ctx.pageLsns.add(RESTART_OP); // this node is
- // not
- // consistent,
- // set the
- // restart
- // indicator for
- // upper level
- break;
- }
- }
+ }
- switch (ctx.op) {
+ repeatOp = false; // operation completed
- case INSERT: {
- if (ctx.splitKey.getBuffer() != null) {
- node = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, pageId), false);
- pins++;
- node.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- insertInterior(node, pageId, ctx.splitKey
- .getTuple(), ctx);
- } finally {
- node.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(node);
- unpins++;
- }
- } else {
- unsetSmPages(ctx);
- }
- }
- break;
+ } // end while
+ } else { // smFlag
+ ctx.opRestarts++;
+ System.out.println("ONGOING SM ON PAGE " + pageId + " AT LEVEL " + ctx.interiorFrame.getLevel()
+ + ", RESTARTS: " + ctx.opRestarts);
+ releaseLatch(node, ctx.op, unsafeIsLeaf);
+ bufferCache.unpin(node);
+ unpins++;
- case DELETE: {
- if (ctx.splitKey.getBuffer() != null) {
- node = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, pageId), false);
- pins++;
- node.acquireWriteLatch();
- writeLatchesAcquired++;
- try {
- deleteInterior(node, pageId, ctx.pred
- .getLowKey(), ctx);
- } finally {
- node.releaseWriteLatch();
- writeLatchesReleased++;
- bufferCache.unpin(node);
- unpins++;
- }
- } else {
- unsetSmPages(ctx);
- }
- }
- break;
+ // TODO: this should be an instant duration lock, how to do
+ // this in java?
+ // instead we just immediately release the lock. this is
+ // inefficient but still correct and will not cause
+ // latch-deadlock
+ treeLatch.readLock().lock();
+ treeLatch.readLock().unlock();
- case SEARCH: {
- // do nothing
- }
- break;
+ // unwind recursion and restart operation, find lowest page
+ // with a pageLsn as seen by this operation during descent
+ ctx.pageLsns.removeLast(); // pop current page lsn
+ // put special value on the stack to inform caller of
+ // restart
+ ctx.pageLsns.add(RESTART_OP);
+ }
+ } else { // isLeaf and !smFlag
+ switch (ctx.op) {
+ case INSERT: {
+ insertLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
+ }
+ break;
- }
+ case DELETE: {
+ deleteLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
+ }
+ break;
- repeatOp = false; // operation completed
+ case SEARCH: {
+ ctx.cursorInitialState.setPage(node);
+ ctx.cursor.open(ctx.cursorInitialState, ctx.pred);
+ }
+ break;
+ }
+ }
+ } catch (TreeIndexException e) {
+ // System.out.println("BTREE EXCEPTION");
+ // System.out.println(e.getMessage());
+ // e.printStackTrace();
+ if (!e.getHandled()) {
+ releaseLatch(node, ctx.op, unsafeIsLeaf);
+ bufferCache.unpin(node);
+ unpins++;
+ e.setHandled(true);
+ }
+ throw e;
+ } catch (Exception e) { // this could be caused, e.g. by a
+ // failure to pin a new node during a split
+ System.out.println("ASTERIX EXCEPTION");
+ e.printStackTrace();
+ releaseLatch(node, ctx.op, unsafeIsLeaf);
+ bufferCache.unpin(node);
+ unpins++;
+ BTreeException propException = new BTreeException(e);
+ propException.setHandled(true); // propagate a BTreeException,
+ // indicating that the parent node
+ // must not be unlatched and
+ // unpinned
+ throw propException;
+ }
+ }
- } // end while
- } else { // smFlag
- ctx.opRestarts++;
- System.out.println("ONGOING SM ON PAGE " + pageId
- + " AT LEVEL " + ctx.interiorFrame.getLevel()
- + ", RESTARTS: " + ctx.opRestarts);
- releaseLatch(node, ctx.op, unsafeIsLeaf);
- bufferCache.unpin(node);
- unpins++;
+ private boolean bulkNewPage = false;
- // TODO: this should be an instant duration lock, how to do
- // this in java?
- // instead we just immediately release the lock. this is
- // inefficient but still correct and will not cause
- // latch-deadlock
- treeLatch.readLock().lock();
- treeLatch.readLock().unlock();
+ public final class BulkLoadContext implements IIndexBulkLoadContext {
+ public final int slotSize;
+ public final int leafMaxBytes;
+ public final int interiorMaxBytes;
+ public final BTreeSplitKey splitKey;
+ // we maintain a frontier of nodes for each level
+ private final ArrayList<NodeFrontier> nodeFrontiers = new ArrayList<NodeFrontier>();
+ private final IBTreeLeafFrame leafFrame;
+ private final IBTreeInteriorFrame interiorFrame;
+ private final ITreeIndexMetaDataFrame metaFrame;
- // unwind recursion and restart operation, find lowest page
- // with a pageLsn as seen by this operation during descent
- ctx.pageLsns.removeLast(); // pop current page lsn
- // put special value on the stack to inform caller of
- // restart
- ctx.pageLsns.add(RESTART_OP);
- }
- } else { // isLeaf and !smFlag
- switch (ctx.op) {
- case INSERT: {
- insertLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
- }
- break;
+ private final ITreeIndexTupleWriter tupleWriter;
- case DELETE: {
- deleteLeaf(node, pageId, ctx.pred.getLowKey(), ctx);
- }
- break;
+ public BulkLoadContext(float fillFactor, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
- case SEARCH: {
- ctx.cursorInitialState.setPage(node);
- ctx.cursor.open(ctx.cursorInitialState, ctx.pred);
- }
- break;
- }
- }
- } catch (TreeIndexException e) {
- // System.out.println("BTREE EXCEPTION");
- // System.out.println(e.getMessage());
- // e.printStackTrace();
- if (!e.getHandled()) {
- releaseLatch(node, ctx.op, unsafeIsLeaf);
- bufferCache.unpin(node);
- unpins++;
- e.setHandled(true);
- }
- throw e;
- } catch (Exception e) { // this could be caused, e.g. by a
- // failure to pin a new node during a split
- System.out.println("ASTERIX EXCEPTION");
- e.printStackTrace();
- releaseLatch(node, ctx.op, unsafeIsLeaf);
- bufferCache.unpin(node);
- unpins++;
- BTreeException propException = new BTreeException(e);
- propException.setHandled(true); // propagate a BTreeException,
- // indicating that the parent node
- // must not be unlatched and
- // unpinned
- throw propException;
- }
- }
+ splitKey = new BTreeSplitKey(leafFrame.getTupleWriter().createTupleReference());
+ tupleWriter = leafFrame.getTupleWriter();
- private boolean bulkNewPage = false;
+ NodeFrontier leafFrontier = new NodeFrontier(leafFrame.createTupleReference());
+ leafFrontier.pageId = freePageManager.getFreePage(metaFrame);
+ leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId),
+ bulkNewPage);
+ leafFrontier.page.acquireWriteLatch();
- public final class BulkLoadContext implements IIndexBulkLoadContext {
- public final int slotSize;
- public final int leafMaxBytes;
- public final int interiorMaxBytes;
- public final BTreeSplitKey splitKey;
- // we maintain a frontier of nodes for each level
- private final ArrayList<NodeFrontier> nodeFrontiers = new ArrayList<NodeFrontier>();
- private final IBTreeLeafFrame leafFrame;
- private final IBTreeInteriorFrame interiorFrame;
- private final ITreeIndexMetaDataFrame metaFrame;
+ interiorFrame.setPage(leafFrontier.page);
+ interiorFrame.initBuffer((byte) 0);
+ interiorMaxBytes = (int) ((float) interiorFrame.getBuffer().capacity() * fillFactor);
- private final ITreeIndexTupleWriter tupleWriter;
+ leafFrame.setPage(leafFrontier.page);
+ leafFrame.initBuffer((byte) 0);
+ leafMaxBytes = (int) ((float) leafFrame.getBuffer().capacity() * fillFactor);
- public BulkLoadContext(float fillFactor, IBTreeLeafFrame leafFrame,
- IBTreeInteriorFrame interiorFrame,
- ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+ slotSize = leafFrame.getSlotSize();
- splitKey = new BTreeSplitKey(leafFrame.getTupleWriter()
- .createTupleReference());
- tupleWriter = leafFrame.getTupleWriter();
+ this.leafFrame = leafFrame;
+ this.interiorFrame = interiorFrame;
+ this.metaFrame = metaFrame;
- NodeFrontier leafFrontier = new NodeFrontier(leafFrame
- .createTupleReference());
- leafFrontier.pageId = freePageManager.getFreePage(metaFrame);
- leafFrontier.page = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, leafFrontier.pageId), bulkNewPage);
- leafFrontier.page.acquireWriteLatch();
+ nodeFrontiers.add(leafFrontier);
+ }
- interiorFrame.setPage(leafFrontier.page);
- interiorFrame.initBuffer((byte) 0);
- interiorMaxBytes = (int) ((float) interiorFrame.getBuffer()
- .capacity() * fillFactor);
+ private void addLevel() throws HyracksDataException {
+ NodeFrontier frontier = new NodeFrontier(tupleWriter.createTupleReference());
+ frontier.pageId = freePageManager.getFreePage(metaFrame);
+ frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), bulkNewPage);
+ frontier.page.acquireWriteLatch();
+ frontier.lastTuple.setFieldCount(cmp.getKeyFieldCount());
+ interiorFrame.setPage(frontier.page);
+ interiorFrame.initBuffer((byte) nodeFrontiers.size());
+ nodeFrontiers.add(frontier);
+ }
+ }
- leafFrame.setPage(leafFrontier.page);
- leafFrame.initBuffer((byte) 0);
- leafMaxBytes = (int) ((float) leafFrame.getBuffer().capacity() * fillFactor);
+ private void propagateBulk(BulkLoadContext ctx, int level) throws HyracksDataException {
- slotSize = leafFrame.getSlotSize();
+ if (ctx.splitKey.getBuffer() == null)
+ return;
- this.leafFrame = leafFrame;
- this.interiorFrame = interiorFrame;
- this.metaFrame = metaFrame;
+ if (level >= ctx.nodeFrontiers.size())
+ ctx.addLevel();
- nodeFrontiers.add(leafFrontier);
- }
+ NodeFrontier frontier = ctx.nodeFrontiers.get(level);
+ ctx.interiorFrame.setPage(frontier.page);
- private void addLevel() throws HyracksDataException {
- NodeFrontier frontier = new NodeFrontier(tupleWriter
- .createTupleReference());
- frontier.pageId = freePageManager.getFreePage(metaFrame);
- frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(
- fileId, frontier.pageId), bulkNewPage);
- frontier.page.acquireWriteLatch();
- frontier.lastTuple.setFieldCount(cmp.getKeyFieldCount());
- interiorFrame.setPage(frontier.page);
- interiorFrame.initBuffer((byte) nodeFrontiers.size());
- nodeFrontiers.add(frontier);
- }
- }
+ ITupleReference tuple = ctx.splitKey.getTuple();
+ int spaceNeeded = ctx.tupleWriter.bytesRequired(tuple, 0, cmp.getKeyFieldCount()) + ctx.slotSize + 4;
+ int spaceUsed = ctx.interiorFrame.getBuffer().capacity() - ctx.interiorFrame.getTotalFreeSpace();
+ if (spaceUsed + spaceNeeded > ctx.interiorMaxBytes) {
- private void propagateBulk(BulkLoadContext ctx, int level)
- throws HyracksDataException {
+ BTreeSplitKey copyKey = ctx.splitKey.duplicate(ctx.leafFrame.getTupleWriter().createTupleReference());
+ tuple = copyKey.getTuple();
- if (ctx.splitKey.getBuffer() == null)
- return;
+ frontier.lastTuple.resetByTupleOffset(frontier.page.getBuffer(),
+ ctx.interiorFrame.getTupleOffset(ctx.interiorFrame.getTupleCount() - 1));
+ int splitKeySize = ctx.tupleWriter.bytesRequired(frontier.lastTuple, 0, cmp.getKeyFieldCount());
+ ctx.splitKey.initData(splitKeySize);
+ ctx.tupleWriter
+ .writeTupleFields(frontier.lastTuple, 0, cmp.getKeyFieldCount(), ctx.splitKey.getBuffer(), 0);
+ ctx.splitKey.getTuple().resetByTupleOffset(ctx.splitKey.getBuffer(), 0);
+ ctx.splitKey.setLeftPage(frontier.pageId);
- if (level >= ctx.nodeFrontiers.size())
- ctx.addLevel();
+ ctx.interiorFrame.deleteGreatest(cmp);
- NodeFrontier frontier = ctx.nodeFrontiers.get(level);
- ctx.interiorFrame.setPage(frontier.page);
+ frontier.page.releaseWriteLatch();
+ bufferCache.unpin(frontier.page);
+ frontier.pageId = freePageManager.getFreePage(ctx.metaFrame);
- ITupleReference tuple = ctx.splitKey.getTuple();
- int spaceNeeded = ctx.tupleWriter.bytesRequired(tuple, 0, cmp
- .getKeyFieldCount())
- + ctx.slotSize + 4;
- int spaceUsed = ctx.interiorFrame.getBuffer().capacity()
- - ctx.interiorFrame.getTotalFreeSpace();
- if (spaceUsed + spaceNeeded > ctx.interiorMaxBytes) {
+ ctx.splitKey.setRightPage(frontier.pageId);
+ propagateBulk(ctx, level + 1);
- BTreeSplitKey copyKey = ctx.splitKey.duplicate(ctx.leafFrame
- .getTupleWriter().createTupleReference());
- tuple = copyKey.getTuple();
+ frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, frontier.pageId), bulkNewPage);
+ frontier.page.acquireWriteLatch();
+ ctx.interiorFrame.setPage(frontier.page);
+ ctx.interiorFrame.initBuffer((byte) level);
+ }
+ ctx.interiorFrame.insertSorted(tuple, cmp);
- frontier.lastTuple.resetByTupleOffset(frontier.page.getBuffer(),
- ctx.interiorFrame.getTupleOffset(ctx.interiorFrame
- .getTupleCount() - 1));
- int splitKeySize = ctx.tupleWriter.bytesRequired(
- frontier.lastTuple, 0, cmp.getKeyFieldCount());
- ctx.splitKey.initData(splitKeySize);
- ctx.tupleWriter.writeTupleFields(frontier.lastTuple, 0, cmp
- .getKeyFieldCount(), ctx.splitKey.getBuffer(), 0);
- ctx.splitKey.getTuple().resetByTupleOffset(
- ctx.splitKey.getBuffer(), 0);
- ctx.splitKey.setLeftPage(frontier.pageId);
+ // debug print
+ // ISerializerDeserializer[] btreeSerde = {
+ // UTF8StringSerializerDeserializer.INSTANCE,
+ // IntegerSerializerDeserializer.INSTANCE };
+ // String s = ctx.interiorFrame.printKeys(cmp, btreeSerde);
+ // System.out.println(s);
+ }
- ctx.interiorFrame.deleteGreatest(cmp);
+ // assumes btree has been created and opened
+ @Override
+ public IIndexBulkLoadContext beginBulkLoad(float fillFactor, ITreeIndexFrame leafFrame,
+ ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
- frontier.page.releaseWriteLatch();
- bufferCache.unpin(frontier.page);
- frontier.pageId = freePageManager.getFreePage(ctx.metaFrame);
+ if (loaded)
+ throw new HyracksDataException("Trying to bulk-load BTree but has BTree already been loaded.");
- ctx.splitKey.setRightPage(frontier.pageId);
- propagateBulk(ctx, level + 1);
+ BulkLoadContext ctx = new BulkLoadContext(fillFactor, (IBTreeLeafFrame) leafFrame,
+ (IBTreeInteriorFrame) interiorFrame, metaFrame);
+ ctx.nodeFrontiers.get(0).lastTuple.setFieldCount(cmp.getFieldCount());
+ ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
+ return ctx;
+ }
- frontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(
- fileId, frontier.pageId), bulkNewPage);
- frontier.page.acquireWriteLatch();
- ctx.interiorFrame.setPage(frontier.page);
- ctx.interiorFrame.initBuffer((byte) level);
- }
- ctx.interiorFrame.insertSorted(tuple, cmp);
+ @Override
+ public void bulkLoadAddTuple(IIndexBulkLoadContext ictx, ITupleReference tuple) throws HyracksDataException {
+ BulkLoadContext ctx = (BulkLoadContext) ictx;
+ NodeFrontier leafFrontier = ctx.nodeFrontiers.get(0);
+ IBTreeLeafFrame leafFrame = ctx.leafFrame;
- // debug print
- // ISerializerDeserializer[] btreeSerde = {
- // UTF8StringSerializerDeserializer.INSTANCE,
- // IntegerSerializerDeserializer.INSTANCE };
- // String s = ctx.interiorFrame.printKeys(cmp, btreeSerde);
- // System.out.println(s);
- }
+ int spaceNeeded = ctx.tupleWriter.bytesRequired(tuple) + ctx.slotSize;
+ int spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();
- // assumes btree has been created and opened
- @Override
- public IIndexBulkLoadContext beginBulkLoad(float fillFactor,
- ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
- ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
-
- if (loaded)
- throw new HyracksDataException(
- "Trying to bulk-load BTree but has BTree already been loaded.");
+ // try to free space by compression
+ if (spaceUsed + spaceNeeded > ctx.leafMaxBytes) {
+ leafFrame.compress(cmp);
+ spaceUsed = leafFrame.getBuffer().capacity() - leafFrame.getTotalFreeSpace();
+ }
- BulkLoadContext ctx = new BulkLoadContext(fillFactor, (IBTreeLeafFrame)leafFrame,
- (IBTreeInteriorFrame)interiorFrame, metaFrame);
- ctx.nodeFrontiers.get(0).lastTuple.setFieldCount(cmp.getFieldCount());
- ctx.splitKey.getTuple().setFieldCount(cmp.getKeyFieldCount());
- return ctx;
- }
+ if (spaceUsed + spaceNeeded > ctx.leafMaxBytes) {
+ leafFrontier.lastTuple.resetByTupleIndex(leafFrame, leafFrame.getTupleCount() - 1);
+ int splitKeySize = ctx.tupleWriter.bytesRequired(leafFrontier.lastTuple, 0, cmp.getKeyFieldCount());
+ ctx.splitKey.initData(splitKeySize);
+ ctx.tupleWriter.writeTupleFields(leafFrontier.lastTuple, 0, cmp.getKeyFieldCount(),
+ ctx.splitKey.getBuffer(), 0);
+ ctx.splitKey.getTuple().resetByTupleOffset(ctx.splitKey.getBuffer(), 0);
+ ctx.splitKey.setLeftPage(leafFrontier.pageId);
+ int prevPageId = leafFrontier.pageId;
+ leafFrontier.pageId = freePageManager.getFreePage(ctx.metaFrame);
- @Override
- public void bulkLoadAddTuple(IIndexBulkLoadContext ictx, ITupleReference tuple)
- throws HyracksDataException {
- BulkLoadContext ctx = (BulkLoadContext)ictx;
- NodeFrontier leafFrontier = ctx.nodeFrontiers.get(0);
- IBTreeLeafFrame leafFrame = ctx.leafFrame;
+ leafFrame.setNextLeaf(leafFrontier.pageId);
+ leafFrontier.page.releaseWriteLatch();
+ bufferCache.unpin(leafFrontier.page);
- int spaceNeeded = ctx.tupleWriter.bytesRequired(tuple) + ctx.slotSize;
- int spaceUsed = leafFrame.getBuffer().capacity()
- - leafFrame.getTotalFreeSpace();
+ ctx.splitKey.setRightPage(leafFrontier.pageId);
+ propagateBulk(ctx, 1);
- // try to free space by compression
- if (spaceUsed + spaceNeeded > ctx.leafMaxBytes) {
- leafFrame.compress(cmp);
- spaceUsed = leafFrame.getBuffer().capacity()
- - leafFrame.getTotalFreeSpace();
- }
+ leafFrontier.page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, leafFrontier.pageId),
+ bulkNewPage);
+ leafFrontier.page.acquireWriteLatch();
+ leafFrame.setPage(leafFrontier.page);
+ leafFrame.initBuffer((byte) 0);
+ leafFrame.setPrevLeaf(prevPageId);
+ }
- if (spaceUsed + spaceNeeded > ctx.leafMaxBytes) {
- leafFrontier.lastTuple.resetByTupleIndex(leafFrame, leafFrame
- .getTupleCount() - 1);
- int splitKeySize = ctx.tupleWriter.bytesRequired(
- leafFrontier.lastTuple, 0, cmp.getKeyFieldCount());
- ctx.splitKey.initData(splitKeySize);
- ctx.tupleWriter.writeTupleFields(leafFrontier.lastTuple, 0, cmp
- .getKeyFieldCount(), ctx.splitKey.getBuffer(), 0);
- ctx.splitKey.getTuple().resetByTupleOffset(
- ctx.splitKey.getBuffer(), 0);
- ctx.splitKey.setLeftPage(leafFrontier.pageId);
- int prevPageId = leafFrontier.pageId;
- leafFrontier.pageId = freePageManager.getFreePage(ctx.metaFrame);
+ leafFrame.setPage(leafFrontier.page);
+ leafFrame.insertSorted(tuple, cmp);
- leafFrame.setNextLeaf(leafFrontier.pageId);
- leafFrontier.page.releaseWriteLatch();
- bufferCache.unpin(leafFrontier.page);
+ // debug print
+ // ISerializerDeserializer[] btreeSerde = {
+ // UTF8StringSerializerDeserializer.INSTANCE,
+ // IntegerSerializerDeserializer.INSTANCE };
+ // String s = leafFrame.printKeys(cmp, btreeSerde);
+ // System.out.println(s);
+ }
- ctx.splitKey.setRightPage(leafFrontier.pageId);
- propagateBulk(ctx, 1);
+ @Override
+ public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
+ // copy root
+ BulkLoadContext ctx = (BulkLoadContext) ictx;
+ ICachedPage rootNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rootPage), bulkNewPage);
+ rootNode.acquireWriteLatch();
+ NodeFrontier lastNodeFrontier = ctx.nodeFrontiers.get(ctx.nodeFrontiers.size() - 1);
+ IBTreeInteriorFrame interiorFrame = ctx.interiorFrame;
+ try {
+ ICachedPage toBeRoot = lastNodeFrontier.page;
+ System.arraycopy(toBeRoot.getBuffer().array(), 0, rootNode.getBuffer().array(), 0, toBeRoot.getBuffer()
+ .capacity());
+ } finally {
+ rootNode.releaseWriteLatch();
+ bufferCache.unpin(rootNode);
- leafFrontier.page = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, leafFrontier.pageId), bulkNewPage);
- leafFrontier.page.acquireWriteLatch();
- leafFrame.setPage(leafFrontier.page);
- leafFrame.initBuffer((byte) 0);
- leafFrame.setPrevLeaf(prevPageId);
- }
+ // register old root as free page
+ freePageManager.addFreePage(ctx.metaFrame, lastNodeFrontier.pageId);
- leafFrame.setPage(leafFrontier.page);
- leafFrame.insertSorted(tuple, cmp);
+ // make old root a free page
+ interiorFrame.setPage(lastNodeFrontier.page);
+ interiorFrame.initBuffer(freePageManager.getFreePageLevelIndicator());
- // debug print
- // ISerializerDeserializer[] btreeSerde = {
- // UTF8StringSerializerDeserializer.INSTANCE,
- // IntegerSerializerDeserializer.INSTANCE };
- // String s = leafFrame.printKeys(cmp, btreeSerde);
- // System.out.println(s);
- }
+ // cleanup
+ for (int i = 0; i < ctx.nodeFrontiers.size(); i++) {
+ ctx.nodeFrontiers.get(i).page.releaseWriteLatch();
+ bufferCache.unpin(ctx.nodeFrontiers.get(i).page);
+ }
+ }
+ // debug
+ currentLevel = (byte) ctx.nodeFrontiers.size();
- @Override
- public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
- // copy root
- BulkLoadContext ctx = (BulkLoadContext)ictx;
- ICachedPage rootNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, rootPage), bulkNewPage);
- rootNode.acquireWriteLatch();
- NodeFrontier lastNodeFrontier = ctx.nodeFrontiers.get(ctx.nodeFrontiers
- .size() - 1);
- IBTreeInteriorFrame interiorFrame = ctx.interiorFrame;
- try {
- ICachedPage toBeRoot = lastNodeFrontier.page;
- System.arraycopy(toBeRoot.getBuffer().array(), 0, rootNode
- .getBuffer().array(), 0, toBeRoot.getBuffer().capacity());
- } finally {
- rootNode.releaseWriteLatch();
- bufferCache.unpin(rootNode);
+ loaded = true;
+ }
- // register old root as free page
- freePageManager.addFreePage(ctx.metaFrame, lastNodeFrontier.pageId);
-
- // make old root a free page
- interiorFrame.setPage(lastNodeFrontier.page);
- interiorFrame.initBuffer(freePageManager.getFreePageLevelIndicator());
-
- // cleanup
- for (int i = 0; i < ctx.nodeFrontiers.size(); i++) {
- ctx.nodeFrontiers.get(i).page.releaseWriteLatch();
- bufferCache.unpin(ctx.nodeFrontiers.get(i).page);
- }
- }
- // debug
- currentLevel = (byte) ctx.nodeFrontiers.size();
+ @Override
+ public BTreeOpContext createOpContext(IndexOp op, ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame) {
+ return new BTreeOpContext(op, (IBTreeLeafFrame) leafFrame, (IBTreeInteriorFrame) interiorFrame, metaFrame, 6);
+ }
- loaded = true;
- }
-
- @Override
- public BTreeOpContext createOpContext(IndexOp op,
- ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
- ITreeIndexMetaDataFrame metaFrame) {
- return new BTreeOpContext(op, (IBTreeLeafFrame)leafFrame, (IBTreeInteriorFrame)interiorFrame, metaFrame, 6);
- }
+ public ITreeIndexFrameFactory getInteriorFrameFactory() {
+ return interiorFrameFactory;
+ }
- public ITreeIndexFrameFactory getInteriorFrameFactory() {
- return interiorFrameFactory;
- }
+ public ITreeIndexFrameFactory getLeafFrameFactory() {
+ return leafFrameFactory;
+ }
- public ITreeIndexFrameFactory getLeafFrameFactory() {
- return leafFrameFactory;
- }
+ public MultiComparator getMultiComparator() {
+ return cmp;
+ }
- public MultiComparator getMultiComparator() {
- return cmp;
- }
+ public IFreePageManager getFreePageManager() {
+ return freePageManager;
+ }
- public IFreePageManager getFreePageManager() {
- return freePageManager;
- }
-
- public int getRootPageId() {
- return rootPage;
- }
+ public int getRootPageId() {
+ return rootPage;
+ }
@Override
public void update(ITupleReference tuple, IndexOpContext ictx) throws Exception {
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/CursorInitialState.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java
similarity index 76%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/CursorInitialState.java
rename to hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java
index f66814f..855f9e6 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/CursorInitialState.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeCursorInitialState.java
@@ -3,11 +3,11 @@
import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-public class CursorInitialState implements ICursorInitialState {
+public class BTreeCursorInitialState implements ICursorInitialState {
private ICachedPage page;
- public CursorInitialState(ICachedPage page) {
+ public BTreeCursorInitialState(ICachedPage page) {
this.page = page;
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
index efe762c..6d65b14 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeOpContext.java
@@ -29,7 +29,7 @@
public final IBTreeInteriorFrame interiorFrame;
public final ITreeIndexMetaDataFrame metaFrame;
public ITreeIndexCursor cursor;
- public CursorInitialState cursorInitialState;
+ public BTreeCursorInitialState cursorInitialState;
public RangePredicate pred;
public final BTreeSplitKey splitKey;
public int opRestarts = 0;
@@ -38,7 +38,7 @@
public final IntArrayList freePages;
public BTreeOpContext(IndexOp op, IBTreeLeafFrame leafFrame, IBTreeInteriorFrame interiorFrame,
- ITreeIndexMetaDataFrame metaFrame, int treeHeightHint) {
+ ITreeIndexMetaDataFrame metaFrame, int treeHeightHint) {
this.op = op;
this.leafFrame = leafFrame;
this.interiorFrame = interiorFrame;
@@ -54,7 +54,7 @@
smPages = null;
freePages = null;
splitKey = null;
- cursorInitialState = new CursorInitialState(null);
+ cursorInitialState = new BTreeCursorInitialState(null);
}
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
index 46e76c9..4c7503d 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeRangeSearchCursor.java
@@ -181,7 +181,7 @@
bufferCache.unpin(page);
}
- page = ((CursorInitialState) initialState).getPage();
+ page = ((BTreeCursorInitialState) initialState).getPage();
frame.setPage(page);
pred = (RangePredicate) searchPred;
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
index 83db63d..5f7c88d 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ICursorInitialState.java
@@ -15,6 +15,10 @@
package edu.uci.ics.hyracks.storage.am.common.api;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
public interface ICursorInitialState {
+ public ICachedPage getPage();
+
+ public void setPage(ICachedPage page);
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java
index c404a5f..7abc0e3 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IFreePageManager.java
@@ -3,16 +3,23 @@
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
public interface IFreePageManager {
- public int getFreePage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
- public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage) throws HyracksDataException;
- public int getMaxPage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
- public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage) throws HyracksDataException;
- public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory();
-
- // required to return negative values
- public byte getMetaPageLevelIndicator();
- public byte getFreePageLevelIndicator();
- // determined by examining level indicator
- public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame);
- public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame);
+ public int getFreePage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
+
+ public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage) throws HyracksDataException;
+
+ public int getMaxPage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException;
+
+ public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage) throws HyracksDataException;
+
+ public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory();
+
+ // required to return negative values
+ public byte getMetaPageLevelIndicator();
+
+ public byte getFreePageLevelIndicator();
+
+ // determined by examining level indicator
+ public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame);
+
+ public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame);
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
index 33883c4..f4836e0 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISearchPredicate.java
@@ -17,5 +17,10 @@
import java.io.Serializable;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
public interface ISearchPredicate extends Serializable {
-}
+ public MultiComparator getLowKeyComparator();
+
+ public MultiComparator getHighKeyComparator();
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java
index 57b9bc5..246c09d 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ISplitKey.java
@@ -3,14 +3,23 @@
import java.nio.ByteBuffer;
public interface ISplitKey {
- public void initData(int keySize);
- public void reset();
- public ByteBuffer getBuffer();
- public ITreeIndexTupleReference getTuple();
- public int getLeftPage();
- public int getRightPage();
- public void setLeftPage(int leftPage);
- public void setRightPage(int rightPage);
- public void setPages(int leftPage, int rightPage);
- public ISplitKey duplicate(ITreeIndexTupleReference copyTuple);
+ public void initData(int keySize);
+
+ public void reset();
+
+ public ByteBuffer getBuffer();
+
+ public ITreeIndexTupleReference getTuple();
+
+ public int getLeftPage();
+
+ public int getRightPage();
+
+ public void setLeftPage(int leftPage);
+
+ public void setRightPage(int rightPage);
+
+ public void setPages(int leftPage, int rightPage);
+
+ public ISplitKey duplicate(ITreeIndexTupleReference copyTuple);
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
index 636a1f7..ee5a1e2 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrame.java
@@ -32,8 +32,9 @@
public ByteBuffer getBuffer();
public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception;
+
public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception;
-
+
public void update(int rid, ITupleReference tuple) throws Exception;
public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception;
@@ -68,7 +69,7 @@
// TODO; what if tuples more than half-page size?
public int split(ITreeIndexFrame rightFrame, ITupleReference tuple, MultiComparator cmp, ISplitKey splitKey)
throws Exception;
-
+
public ISlotManager getSlotManager();
// ATTENTION: in b-tree operations it may not always be possible to
@@ -76,8 +77,9 @@
// a compatible interior and leaf implementation MUST return identical
// values when given the same ByteBuffer for the functions below
public boolean isLeaf();
+
public boolean isInterior();
-
+
public byte getLevel();
public void setLevel(byte level);
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
index 9ec69d9..83b95b6 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexFrameFactory.java
@@ -3,5 +3,5 @@
import java.io.Serializable;
public interface ITreeIndexFrameFactory extends Serializable {
- public ITreeIndexFrame createFrame();
+ public ITreeIndexFrame createFrame();
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
index 07473ef..6fd88e8 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/ITreeIndexMetaDataFrameFactory.java
@@ -15,7 +15,6 @@
package edu.uci.ics.hyracks.storage.am.common.api;
-
public interface ITreeIndexMetaDataFrameFactory {
public ITreeIndexMetaDataFrame createFrame();
}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java
index 6f83e0b..d5f9f44 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/api/IndexType.java
@@ -1,5 +1,5 @@
package edu.uci.ics.hyracks.storage.am.common.api;
public enum IndexType {
- BTREE, RTREE, INVERTED
+ BTREE, RTREE, INVERTED
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
index 0ab2a9a..c905eb2 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/AbstractTreeIndexOperatorDescriptor.java
@@ -43,12 +43,13 @@
protected final ITypeTrait[] typeTraits;
protected final ITreeIndexOpHelperFactory opHelperFactory;
-
+
public AbstractTreeIndexOperatorDescriptor(JobSpecification spec, int inputArity, int outputArity,
RecordDescriptor recDesc, IStorageManagerInterface storageManager,
IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
- ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
- IBinaryComparatorFactory[] comparatorFactories, ITreeIndexOpHelperFactory opHelperFactory) {
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
+ ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories,
+ ITreeIndexOpHelperFactory opHelperFactory) {
super(spec, inputArity, outputArity);
this.fileSplitProvider = fileSplitProvider;
this.storageManager = storageManager;
@@ -101,9 +102,9 @@
public RecordDescriptor getRecordDescriptor() {
return recordDescriptors[0];
}
-
+
@Override
public ITreeIndexOpHelperFactory getTreeIndexOpHelperFactory() {
- return opHelperFactory;
+ return opHelperFactory;
}
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOpHelperFactory.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOpHelperFactory.java
index 583ef7d..fa37fab 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOpHelperFactory.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOpHelperFactory.java
@@ -5,6 +5,6 @@
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
public interface ITreeIndexOpHelperFactory extends Serializable {
- public TreeIndexOpHelper createTreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc, final IHyracksStageletContext ctx, int partition,
- IndexHelperOpenMode mode);
+ public TreeIndexOpHelper createTreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
+ final IHyracksStageletContext ctx, int partition, IndexHelperOpenMode mode);
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java
index 99affd3..6ca4529 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/ITreeIndexOperatorDescriptorHelper.java
@@ -25,6 +25,6 @@
public IIndexRegistryProvider<ITreeIndex> getTreeIndexRegistryProvider();
public RecordDescriptor getRecordDescriptor();
-
+
public ITreeIndexOpHelperFactory getTreeIndexOpHelperFactory();
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexHelperOpenMode.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexHelperOpenMode.java
index 0b27a0e..aa41184 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexHelperOpenMode.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexHelperOpenMode.java
@@ -1,5 +1,5 @@
package edu.uci.ics.hyracks.storage.am.common.dataflow;
public enum IndexHelperOpenMode {
- OPEN, CREATE, ENLIST
+ OPEN, CREATE, ENLIST
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java
index e62b41d..de00d5a 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/IndexRegistry.java
@@ -27,7 +27,7 @@
public IndexType get(int fileId) {
return map.get(fileId);
}
-
+
public void lock() {
registryLock.lock();
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
index 4398aad..7d585b3 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorDescriptor.java
@@ -32,14 +32,15 @@
private static final long serialVersionUID = 1L;
private final int[] fieldPermutation;
- private final float fillFactor;
-
+ private final float fillFactor;
+
public TreeIndexBulkLoadOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
- ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
- IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation, float fillFactor, ITreeIndexOpHelperFactory opHelperFactory) {
- super(spec, 1, 0, null, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory, leafFrameFactory,
- typeTraits, comparatorFactories, opHelperFactory);
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
+ ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories, int[] fieldPermutation,
+ float fillFactor, ITreeIndexOpHelperFactory opHelperFactory) {
+ super(spec, 1, 0, null, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
+ leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
this.fieldPermutation = fieldPermutation;
this.fillFactor = fillFactor;
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
index 60e7e98..2001039 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexBulkLoadOperatorNodePushable.java
@@ -36,9 +36,11 @@
private PermutingFrameTupleReference tuple = new PermutingFrameTupleReference();
- public TreeIndexBulkLoadOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksStageletContext ctx,
- int partition, int[] fieldPermutation, float fillFactor, IRecordDescriptorProvider recordDescProvider) {
- treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition, IndexHelperOpenMode.CREATE);
+ public TreeIndexBulkLoadOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+ IHyracksStageletContext ctx, int partition, int[] fieldPermutation, float fillFactor,
+ IRecordDescriptorProvider recordDescProvider) {
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
+ IndexHelperOpenMode.CREATE);
this.fillFactor = fillFactor;
this.recordDescProvider = recordDescProvider;
tuple.setFieldPermutation(fieldPermutation);
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
index 5ec6083..415b9e3 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDiskOrderScanOperatorNodePushable.java
@@ -34,9 +34,10 @@
public class TreeIndexDiskOrderScanOperatorNodePushable extends AbstractUnaryOutputSourceOperatorNodePushable {
private final TreeIndexOpHelper treeIndexOpHelper;
- public TreeIndexDiskOrderScanOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksStageletContext ctx,
- int partition) {
- treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition, IndexHelperOpenMode.OPEN);
+ public TreeIndexDiskOrderScanOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
+ IHyracksStageletContext ctx, int partition) {
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
+ IndexHelperOpenMode.OPEN);
}
@Override
@@ -49,55 +50,55 @@
IndexOpContext diskOrderScanOpCtx = treeIndexOpHelper.getTreeIndex().createOpContext(IndexOp.DISKORDERSCAN,
cursorFrame, null, null);
try {
-
- treeIndexOpHelper.init();
-
- try {
- treeIndexOpHelper.getTreeIndex().diskOrderScan(cursor, cursorFrame, metaFrame, diskOrderScanOpCtx);
- int fieldCount = treeIndexOpHelper.getTreeIndex().getFieldCount();
- ByteBuffer frame = treeIndexOpHelper.getHyracksStageletContext().allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksStageletContext().getFrameSize());
- appender.reset(frame, true);
- ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
- DataOutput dos = tb.getDataOutput();
+ treeIndexOpHelper.init();
- while (cursor.hasNext()) {
- tb.reset();
- cursor.next();
+ try {
+ treeIndexOpHelper.getTreeIndex().diskOrderScan(cursor, cursorFrame, metaFrame, diskOrderScanOpCtx);
- ITupleReference frameTuple = cursor.getTuple();
- for (int i = 0; i < frameTuple.getFieldCount(); i++) {
- dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
- tb.addFieldEndOffset();
- }
+ int fieldCount = treeIndexOpHelper.getTreeIndex().getFieldCount();
+ ByteBuffer frame = treeIndexOpHelper.getHyracksStageletContext().allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(treeIndexOpHelper.getHyracksStageletContext()
+ .getFrameSize());
+ appender.reset(frame, true);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ DataOutput dos = tb.getDataOutput();
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- FrameUtils.flushFrame(frame, writer);
- appender.reset(frame, true);
- if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
- throw new IllegalStateException();
- }
- }
- }
+ while (cursor.hasNext()) {
+ tb.reset();
+ cursor.next();
- if (appender.getTupleCount() > 0) {
- FrameUtils.flushFrame(frame, writer);
- }
- }
- finally {
- cursor.close();
- writer.close();
- }
+ ITupleReference frameTuple = cursor.getTuple();
+ for (int i = 0; i < frameTuple.getFieldCount(); i++) {
+ dos.write(frameTuple.getFieldData(i), frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ tb.addFieldEndOffset();
+ }
- } catch(Exception e) {
- deinitialize();
- throw new HyracksDataException(e);
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ FrameUtils.flushFrame(frame, writer);
+ appender.reset(frame, true);
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ throw new IllegalStateException();
+ }
+ }
+ }
+
+ if (appender.getTupleCount() > 0) {
+ FrameUtils.flushFrame(frame, writer);
+ }
+ } finally {
+ cursor.close();
+ writer.close();
+ }
+
+ } catch (Exception e) {
+ deinitialize();
+ throw new HyracksDataException(e);
}
}
@Override
public void deinitialize() throws HyracksDataException {
- treeIndexOpHelper.deinit();
+ treeIndexOpHelper.deinit();
}
}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java
index f5348c3..2cfa905 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorDescriptor.java
@@ -32,7 +32,7 @@
private IStorageManagerInterface storageManager;
private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
private IFileSplitProvider fileSplitProvider;
-
+
public TreeIndexDropOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider) {
super(spec, 0, 0);
@@ -44,6 +44,7 @@
@Override
public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
- return new TreeIndexDropOperatorNodePushable(ctx, storageManager, treeIndexRegistryProvider, fileSplitProvider, partition);
+ return new TreeIndexDropOperatorNodePushable(ctx, storageManager, treeIndexRegistryProvider, fileSplitProvider,
+ partition);
}
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java
index 3e34c7b..71346f7 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexDropOperatorNodePushable.java
@@ -31,16 +31,17 @@
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
public class TreeIndexDropOperatorNodePushable extends AbstractOperatorNodePushable {
- private static final Logger LOGGER = Logger.getLogger(TreeIndexDropOperatorNodePushable.class.getName());
-
- private final IHyracksStageletContext ctx;
+ private static final Logger LOGGER = Logger.getLogger(TreeIndexDropOperatorNodePushable.class.getName());
+
+ private final IHyracksStageletContext ctx;
private IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider;
private IStorageManagerInterface storageManager;
private IFileSplitProvider fileSplitProvider;
private int partition;
public TreeIndexDropOperatorNodePushable(IHyracksStageletContext ctx, IStorageManagerInterface storageManager,
- IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider, int partition) {
+ IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
+ int partition) {
this.ctx = ctx;
this.storageManager = storageManager;
this.treeIndexRegistryProvider = treeIndexRegistryProvider;
@@ -64,39 +65,41 @@
@Override
public void initialize() throws HyracksDataException {
- try {
+ try {
- IndexRegistry<ITreeIndex> treeIndexRegistry = treeIndexRegistryProvider.getRegistry(ctx);
- IBufferCache bufferCache = storageManager.getBufferCache(ctx);
- IFileMapProvider fileMapProvider = storageManager.getFileMapProvider(ctx);
+ IndexRegistry<ITreeIndex> treeIndexRegistry = treeIndexRegistryProvider.getRegistry(ctx);
+ IBufferCache bufferCache = storageManager.getBufferCache(ctx);
+ IFileMapProvider fileMapProvider = storageManager.getFileMapProvider(ctx);
- FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
+ FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
- boolean fileIsMapped = fileMapProvider.isMapped(f);
- if (!fileIsMapped) {
- throw new HyracksDataException("Cannot drop Tree with name " + f.toString() + ". No file mapping exists.");
- }
+ boolean fileIsMapped = fileMapProvider.isMapped(f);
+ if (!fileIsMapped) {
+ throw new HyracksDataException("Cannot drop Tree with name " + f.toString()
+ + ". No file mapping exists.");
+ }
- int indexFileId = fileMapProvider.lookupFileId(f);
+ int indexFileId = fileMapProvider.lookupFileId(f);
- // unregister tree instance
- treeIndexRegistry.lock();
- try {
- treeIndexRegistry.unregister(indexFileId);
- } finally {
- treeIndexRegistry.unlock();
- }
+ // unregister tree instance
+ treeIndexRegistry.lock();
+ try {
+ treeIndexRegistry.unregister(indexFileId);
+ } finally {
+ treeIndexRegistry.unlock();
+ }
- // remove name to id mapping
- bufferCache.deleteFile(indexFileId);
- }
- // TODO: for the time being we don't throw,
- // with proper exception handling (no hanging job problem) we should throw
- catch (Exception e) {
- if (LOGGER.isLoggable(Level.WARNING)) {
+ // remove name to id mapping
+ bufferCache.deleteFile(indexFileId);
+ }
+ // TODO: for the time being we don't throw,
+ // with proper exception handling (no hanging job problem) we should
+ // throw
+ catch (Exception e) {
+ if (LOGGER.isLoggable(Level.WARNING)) {
LOGGER.warning("Tree Drop Operator Failed Due To Exception: " + e.getMessage());
}
- }
+ }
}
@Override
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
index 533a047..74a9efc 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexInsertUpdateDeleteOperatorNodePushable.java
@@ -40,7 +40,8 @@
public TreeIndexInsertUpdateDeleteOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc,
IHyracksStageletContext ctx, int partition, int[] fieldPermutation,
IRecordDescriptorProvider recordDescProvider, IndexOp op) {
- treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition, IndexHelperOpenMode.OPEN);
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
+ IndexHelperOpenMode.OPEN);
this.recordDescProvider = recordDescProvider;
this.op = op;
tuple.setFieldPermutation(fieldPermutation);
@@ -48,20 +49,21 @@
@Override
public void open() throws HyracksDataException {
- AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor)treeIndexOpHelper.getOperatorDescriptor();
- RecordDescriptor inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
- accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksStageletContext().getFrameSize(), inputRecDesc);
- writeBuffer = treeIndexOpHelper.getHyracksStageletContext().allocateFrame();
- try {
- treeIndexOpHelper.init();
- treeIndexOpHelper.getTreeIndex().open(treeIndexOpHelper.getIndexFileId());
- opCtx = treeIndexOpHelper.getTreeIndex().createOpContext(op, treeIndexOpHelper.getLeafFrame(),
- treeIndexOpHelper.getInteriorFrame(), new LIFOMetaDataFrame());
- } catch(Exception e) {
- // cleanup in case of failure
- treeIndexOpHelper.deinit();
- throw new HyracksDataException(e);
- }
+ AbstractTreeIndexOperatorDescriptor opDesc = (AbstractTreeIndexOperatorDescriptor) treeIndexOpHelper
+ .getOperatorDescriptor();
+ RecordDescriptor inputRecDesc = recordDescProvider.getInputRecordDescriptor(opDesc.getOperatorId(), 0);
+ accessor = new FrameTupleAccessor(treeIndexOpHelper.getHyracksStageletContext().getFrameSize(), inputRecDesc);
+ writeBuffer = treeIndexOpHelper.getHyracksStageletContext().allocateFrame();
+ try {
+ treeIndexOpHelper.init();
+ treeIndexOpHelper.getTreeIndex().open(treeIndexOpHelper.getIndexFileId());
+ opCtx = treeIndexOpHelper.getTreeIndex().createOpContext(op, treeIndexOpHelper.getLeafFrame(),
+ treeIndexOpHelper.getInteriorFrame(), new LIFOMetaDataFrame());
+ } catch (Exception e) {
+ // cleanup in case of failure
+ treeIndexOpHelper.deinit();
+ throw new HyracksDataException(e);
+ }
}
@Override
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
index 741a5a5..fca6d8d 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexOpHelper.java
@@ -23,34 +23,35 @@
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-public abstract class TreeIndexOpHelper {
-
- protected ITreeIndexFrame interiorFrame;
- protected ITreeIndexFrame leafFrame;
- protected MultiComparator cmp;
-
- protected ITreeIndex treeIndex;
- protected int indexFileId = -1;
- protected int partition;
+public abstract class TreeIndexOpHelper {
- protected ITreeIndexOperatorDescriptorHelper opDesc;
- protected IHyracksStageletContext ctx;
+ protected ITreeIndexFrame interiorFrame;
+ protected ITreeIndexFrame leafFrame;
+ protected MultiComparator cmp;
- protected IndexHelperOpenMode mode;
+ protected ITreeIndex treeIndex;
+ protected int indexFileId = -1;
+ protected int partition;
- public TreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc, final IHyracksStageletContext ctx, int partition,
- IndexHelperOpenMode mode) {
+ protected ITreeIndexOperatorDescriptorHelper opDesc;
+ protected IHyracksStageletContext ctx;
+
+ protected IndexHelperOpenMode mode;
+
+ public TreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc, final IHyracksStageletContext ctx,
+ int partition, IndexHelperOpenMode mode) {
this.opDesc = opDesc;
this.ctx = ctx;
this.mode = mode;
this.partition = partition;
}
- public void init() throws HyracksDataException {
+ public void init() throws HyracksDataException {
IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
IFileMapProvider fileMapProvider = opDesc.getStorageManager().getFileMapProvider(ctx);
IFileSplitProvider fileSplitProvider = opDesc.getTreeIndexFileSplitProvider();
@@ -58,94 +59,90 @@
FileReference f = fileSplitProvider.getFileSplits()[partition].getLocalFile();
boolean fileIsMapped = fileMapProvider.isMapped(f);
- switch (mode) {
-
- case OPEN: {
- if (!fileIsMapped) {
- throw new HyracksDataException(
- "Trying to open tree index from unmapped file " + f.toString());
- }
- }
- break;
+ switch (mode) {
- case CREATE:
- case ENLIST: {
- if (!fileIsMapped) {
- bufferCache.createFile(f);
- }
- }
- break;
-
- }
-
- int fileId = fileMapProvider.lookupFileId(f);
- try {
- bufferCache.openFile(fileId);
- } catch(HyracksDataException e) {
- // revert state of buffer cache since file failed to open
- if(!fileIsMapped) {
- bufferCache.deleteFile(fileId);
- }
- throw e;
+ case OPEN: {
+ if (!fileIsMapped) {
+ throw new HyracksDataException("Trying to open tree index from unmapped file " + f.toString());
+ }
+ }
+ break;
+
+ case CREATE:
+ case ENLIST: {
+ if (!fileIsMapped) {
+ bufferCache.createFile(f);
+ }
+ }
+ break;
+
}
-
- // only set indexFileId member when openFile() succeeds,
+
+ int fileId = fileMapProvider.lookupFileId(f);
+ try {
+ bufferCache.openFile(fileId);
+ } catch (HyracksDataException e) {
+ // revert state of buffer cache since file failed to open
+ if (!fileIsMapped) {
+ bufferCache.deleteFile(fileId);
+ }
+ throw e;
+ }
+
+ // only set indexFileId member when openFile() succeeds,
// otherwise deinit() will try to close the file that failed to open
indexFileId = fileId;
- interiorFrame = opDesc.getTreeIndexInteriorFactory().createFrame();
- leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
+ interiorFrame = opDesc.getTreeIndexInteriorFactory().createFrame();
+ leafFrame = opDesc.getTreeIndexLeafFactory().createFrame();
IndexRegistry<ITreeIndex> treeIndexRegistry = opDesc.getTreeIndexRegistryProvider().getRegistry(ctx);
treeIndex = treeIndexRegistry.get(indexFileId);
if (treeIndex == null) {
- // create new tree and register it
+ // create new tree and register it
treeIndexRegistry.lock();
- try {
- // check if tree has already been registered by another thread
- treeIndex = treeIndexRegistry.get(indexFileId);
- if (treeIndex == null) {
- // this thread should create and register the tree
+ try {
+ // check if tree has already been registered by another thread
+ treeIndex = treeIndexRegistry.get(indexFileId);
+ if (treeIndex == null) {
+ // this thread should create and register the tree
- IBinaryComparator[] comparators = new IBinaryComparator[opDesc
- .getTreeIndexComparatorFactories().length];
- for (int i = 0; i < opDesc.getTreeIndexComparatorFactories().length; i++) {
- comparators[i] = opDesc.getTreeIndexComparatorFactories()[i]
- .createBinaryComparator();
- }
+ IBinaryComparator[] comparators = new IBinaryComparator[opDesc.getTreeIndexComparatorFactories().length];
+ for (int i = 0; i < opDesc.getTreeIndexComparatorFactories().length; i++) {
+ comparators[i] = opDesc.getTreeIndexComparatorFactories()[i].createBinaryComparator();
+ }
- cmp = new MultiComparator(opDesc
- .getTreeIndexTypeTraits(), comparators);
-
- treeIndex = createTreeIndex();
- if (mode == IndexHelperOpenMode.CREATE) {
- ITreeIndexMetaDataFrame metaFrame = treeIndex.getFreePageManager().getMetaDataFrameFactory().createFrame();
- try {
- treeIndex.create(indexFileId, leafFrame, metaFrame);
- } catch (Exception e) {
- throw new HyracksDataException(e);
- }
- }
- treeIndex.open(indexFileId);
- treeIndexRegistry.register(indexFileId, treeIndex);
- }
- } finally {
- treeIndexRegistry.unlock();
- }
- }
- }
+ cmp = new MultiComparator(opDesc.getTreeIndexTypeTraits(), comparators);
- // MUST be overridden
- public ITreeIndex createTreeIndex() throws HyracksDataException {
- throw new HyracksDataException("createTreeIndex Operation not implemented.");
- }
-
- // MUST be overridden
- public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame) throws HyracksDataException {
- throw new HyracksDataException("createDiskOrderScanCursor Operation not implemented.");
- }
-
+ treeIndex = createTreeIndex();
+ if (mode == IndexHelperOpenMode.CREATE) {
+ ITreeIndexMetaDataFrame metaFrame = treeIndex.getFreePageManager().getMetaDataFrameFactory()
+ .createFrame();
+ try {
+ treeIndex.create(indexFileId, leafFrame, metaFrame);
+ } catch (Exception e) {
+ throw new HyracksDataException(e);
+ }
+ }
+ treeIndex.open(indexFileId);
+ treeIndexRegistry.register(indexFileId, treeIndex);
+ }
+ } finally {
+ treeIndexRegistry.unlock();
+ }
+ }
+ }
+
+ // MUST be overridden
+ public ITreeIndex createTreeIndex() throws HyracksDataException {
+ throw new HyracksDataException("createTreeIndex Operation not implemented.");
+ }
+
+ public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame) throws HyracksDataException {
+ return new TreeDiskOrderScanCursor(leafFrame);
+ }
+
public void deinit() throws HyracksDataException {
if (indexFileId != -1) {
IBufferCache bufferCache = opDesc.getStorageManager().getBufferCache(ctx);
@@ -153,27 +150,27 @@
}
}
- public ITreeIndex getTreeIndex() {
- return treeIndex;
- }
+ public ITreeIndex getTreeIndex() {
+ return treeIndex;
+ }
public IHyracksStageletContext getHyracksStageletContext() {
return ctx;
}
- public ITreeIndexOperatorDescriptorHelper getOperatorDescriptor() {
- return opDesc;
- }
+ public ITreeIndexOperatorDescriptorHelper getOperatorDescriptor() {
+ return opDesc;
+ }
- public ITreeIndexFrame getLeafFrame() {
- return leafFrame;
- }
+ public ITreeIndexFrame getLeafFrame() {
+ return leafFrame;
+ }
- public ITreeIndexFrame getInteriorFrame() {
- return interiorFrame;
- }
+ public ITreeIndexFrame getInteriorFrame() {
+ return interiorFrame;
+ }
- public int getIndexFileId() {
- return indexFileId;
- }
+ public int getIndexFileId() {
+ return indexFileId;
+ }
}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
index 1c0c734..574e727 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorDescriptor.java
@@ -15,15 +15,16 @@
public class TreeIndexStatsOperatorDescriptor extends AbstractTreeIndexOperatorDescriptor {
private static final long serialVersionUID = 1L;
-
+
public TreeIndexStatsOperatorDescriptor(JobSpecification spec, IStorageManagerInterface storageManager,
IIndexRegistryProvider<ITreeIndex> treeIndexRegistryProvider, IFileSplitProvider fileSplitProvider,
- ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory, ITypeTrait[] typeTraits,
- IBinaryComparatorFactory[] comparatorFactories, ITreeIndexOpHelperFactory opHelperFactory) {
- super(spec, 0, 0, null, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory, leafFrameFactory,
- typeTraits, comparatorFactories, opHelperFactory);
+ ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory,
+ ITypeTrait[] typeTraits, IBinaryComparatorFactory[] comparatorFactories,
+ ITreeIndexOpHelperFactory opHelperFactory) {
+ super(spec, 0, 0, null, storageManager, treeIndexRegistryProvider, fileSplitProvider, interiorFrameFactory,
+ leafFrameFactory, typeTraits, comparatorFactories, opHelperFactory);
}
-
+
@Override
public IOperatorNodePushable createPushRuntime(IHyracksStageletContext ctx, IOperatorEnvironment env,
IRecordDescriptorProvider recordDescProvider, int partition, int nPartitions) {
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
index 11ac96f..f47855f 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/dataflow/TreeIndexStatsOperatorNodePushable.java
@@ -24,54 +24,55 @@
import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-public class TreeIndexStatsOperatorNodePushable extends
-AbstractOperatorNodePushable {
- private final TreeIndexOpHelper treeIndexOpHelper;
- private final IHyracksStageletContext ctx;
- private TreeIndexStatsGatherer statsGatherer;
+public class TreeIndexStatsOperatorNodePushable extends AbstractOperatorNodePushable {
+ private final TreeIndexOpHelper treeIndexOpHelper;
+ private final IHyracksStageletContext ctx;
+ private TreeIndexStatsGatherer statsGatherer;
- public TreeIndexStatsOperatorNodePushable(
- AbstractTreeIndexOperatorDescriptor opDesc,
- IHyracksStageletContext ctx, int partition) {
- treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
- IndexHelperOpenMode.CREATE);
- this.ctx = ctx;
- }
+ public TreeIndexStatsOperatorNodePushable(AbstractTreeIndexOperatorDescriptor opDesc, IHyracksStageletContext ctx,
+ int partition) {
+ treeIndexOpHelper = opDesc.getTreeIndexOpHelperFactory().createTreeIndexOpHelper(opDesc, ctx, partition,
+ IndexHelperOpenMode.CREATE);
+ this.ctx = ctx;
+ }
- @Override
- public void deinitialize() throws HyracksDataException {
- }
+ @Override
+ public void deinitialize() throws HyracksDataException {
+ }
- @Override
- public int getInputArity() {
- return 0;
- }
+ @Override
+ public int getInputArity() {
+ return 0;
+ }
- @Override
- public IFrameWriter getInputFrameWriter(int index) {
- return null;
- }
+ @Override
+ public IFrameWriter getInputFrameWriter(int index) {
+ return null;
+ }
- @Override
- public void initialize() throws HyracksDataException {
- try {
- treeIndexOpHelper.init();
- treeIndexOpHelper.getTreeIndex().open(treeIndexOpHelper.getIndexFileId());
+ @Override
+ public void initialize() throws HyracksDataException {
+ try {
+ treeIndexOpHelper.init();
+ treeIndexOpHelper.getTreeIndex().open(treeIndexOpHelper.getIndexFileId());
- ITreeIndex treeIndex = treeIndexOpHelper.getTreeIndex();
- IBufferCache bufferCache = treeIndexOpHelper.getOperatorDescriptor().getStorageManager().getBufferCache(ctx);
+ ITreeIndex treeIndex = treeIndexOpHelper.getTreeIndex();
+ IBufferCache bufferCache = treeIndexOpHelper.getOperatorDescriptor().getStorageManager()
+ .getBufferCache(ctx);
- statsGatherer = new TreeIndexStatsGatherer(bufferCache, treeIndex.getFreePageManager(), treeIndexOpHelper.getIndexFileId(), treeIndex.getRootPageId());
- TreeIndexStats stats = statsGatherer.gatherStats(treeIndex.getLeafFrameFactory().createFrame(), treeIndex.getInteriorFrameFactory().createFrame(), treeIndex.getFreePageManager().getMetaDataFrameFactory().createFrame());
- System.err.println(stats.toString());
- } catch (Exception e) {
- treeIndexOpHelper.deinit();
- throw new HyracksDataException(e);
- }
- }
+ statsGatherer = new TreeIndexStatsGatherer(bufferCache, treeIndex.getFreePageManager(),
+ treeIndexOpHelper.getIndexFileId(), treeIndex.getRootPageId());
+ TreeIndexStats stats = statsGatherer.gatherStats(treeIndex.getLeafFrameFactory().createFrame(), treeIndex
+ .getInteriorFrameFactory().createFrame(), treeIndex.getFreePageManager().getMetaDataFrameFactory()
+ .createFrame());
+ System.err.println(stats.toString());
+ } catch (Exception e) {
+ treeIndexOpHelper.deinit();
+ throw new HyracksDataException(e);
+ }
+ }
- @Override
- public void setOutputFrameWriter(int index, IFrameWriter writer,
- RecordDescriptor recordDesc) {
- }
+ @Override
+ public void setOutputFrameWriter(int index, IFrameWriter writer, RecordDescriptor recordDesc) {
+ }
}
\ No newline at end of file
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
index 7ad12eb..87fea47 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/AbstractSlotManager.java
@@ -47,7 +47,7 @@
public int getSlotSize() {
return slotSize;
}
-
+
@Override
public void setFrame(ITreeIndexFrame frame) {
this.frame = frame;
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
index 0b24c95..af1e337 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/frames/TreeIndexNSMFrame.java
@@ -72,7 +72,7 @@
public boolean isLeaf() {
return buf.get(levelOff) == 0;
}
-
+
@Override
public boolean isInterior() {
return buf.get(levelOff) > 0;
@@ -160,14 +160,14 @@
buf.putInt(freeSpaceOff, freeSpace);
buf.putInt(totalFreeSpaceOff, buf.capacity() - freeSpace - tupleCount * slotManager.getSlotSize());
-
+
return false;
}
@Override
public void delete(ITupleReference tuple, MultiComparator cmp, boolean exactDelete) throws Exception {
-
- frameTuple.setFieldCount(cmp.getFieldCount());
+
+ frameTuple.setFieldCount(cmp.getFieldCount());
int tupleIndex = slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_EXACT,
FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
int slotOff = slotManager.getSlotOff(tupleIndex);
@@ -180,11 +180,11 @@
int tupleOff = slotManager.getTupleOff(slotOff);
frameTuple.resetByTupleOffset(buf, tupleOff);
- int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount() - 1, cmp
- .getFieldCount()
- - cmp.getKeyFieldCount());
+ int comparison = cmp.fieldRangeCompare(tuple, frameTuple, cmp.getKeyFieldCount() - 1,
+ cmp.getFieldCount() - cmp.getKeyFieldCount());
if (comparison != 0) {
- throw new TreeIndexException("Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
+ throw new TreeIndexException(
+ "Cannot delete tuple. Byte-by-byte comparison failed to prove equality.");
}
}
@@ -222,18 +222,19 @@
}
protected void resetSpaceParams() {
- buf.putInt(freeSpaceOff, smFlagOff + 1);
- buf.putInt(totalFreeSpaceOff, buf.capacity() - (smFlagOff + 1));
+ buf.putInt(freeSpaceOff, smFlagOff + 1);
+ buf.putInt(totalFreeSpaceOff, buf.capacity() - (smFlagOff + 1));
}
@Override
public int findTupleIndex(ITupleReference tuple, MultiComparator cmp) throws Exception {
- frameTuple.setFieldCount(cmp.getFieldCount());
- return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE, FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
+ frameTuple.setFieldCount(cmp.getFieldCount());
+ return slotManager.findTupleIndex(tuple, frameTuple, cmp, FindTupleMode.FTM_INCLUSIVE,
+ FindTupleNoExactMatchPolicy.FTP_HIGHER_KEY);
}
-
+
@Override
- public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
+ public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception {
slotManager.insertSlot(tupleIndex, buf.getInt(freeSpaceOff));
int bytesWritten = tupleWriter.writeTuple(tuple, buf, buf.getInt(freeSpaceOff));
buf.putInt(tupleCountOff, buf.getInt(tupleCountOff) + 1);
@@ -270,8 +271,8 @@
for (int i = 0; i < tupleCount; i++) {
frameTuple.resetByTupleIndex(this, i);
for (int j = 0; j < cmp.getKeyFieldCount(); j++) {
- ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j), frameTuple
- .getFieldStart(j), frameTuple.getFieldLength(j));
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(j),
+ frameTuple.getFieldStart(j), frameTuple.getFieldLength(j));
DataInput dataIn = new DataInputStream(inStream);
Object o = fields[j].deserialize(dataIn);
strBuilder.append(o.toString() + " ");
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java
index 75470da..42bf70f 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/freepage/LinkedListFreePageManager.java
@@ -10,190 +10,178 @@
public class LinkedListFreePageManager implements IFreePageManager {
- private static final byte META_PAGE_LEVEL_INDICATOR = -1;
- private static final byte FREE_PAGE_LEVEL_INDICATOR = -2;
- private final IBufferCache bufferCache;
- private final int fileId;
- private final int headPage;
- private final ITreeIndexMetaDataFrameFactory metaDataFrameFactory;
+ private static final byte META_PAGE_LEVEL_INDICATOR = -1;
+ private static final byte FREE_PAGE_LEVEL_INDICATOR = -2;
+ private final IBufferCache bufferCache;
+ private final int fileId;
+ private final int headPage;
+ private final ITreeIndexMetaDataFrameFactory metaDataFrameFactory;
- public LinkedListFreePageManager(IBufferCache bufferCache, int fileId, int headPage, ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
- this.bufferCache = bufferCache;
- this.fileId = fileId;
- this.headPage = headPage;
- this.metaDataFrameFactory = metaDataFrameFactory;
- }
+ public LinkedListFreePageManager(IBufferCache bufferCache, int fileId, int headPage,
+ ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
+ this.bufferCache = bufferCache;
+ this.fileId = fileId;
+ this.headPage = headPage;
+ this.metaDataFrameFactory = metaDataFrameFactory;
+ }
- @Override
- public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage)
- throws HyracksDataException {
+ @Override
+ public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage) throws HyracksDataException {
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, headPage), false);
- metaNode.acquireWriteLatch();
+ ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), false);
+ metaNode.acquireWriteLatch();
- try {
- metaFrame.setPage(metaNode);
+ try {
+ metaFrame.setPage(metaNode);
- if (metaFrame.hasSpace()) {
- metaFrame.addFreePage(freePage);
- } else {
- // allocate a new page in the chain of meta pages
- int newPage = metaFrame.getFreePage();
- if (newPage < 0) {
- throw new Exception(
- "Inconsistent Meta Page State. It has no space, but it also has no entries.");
- }
+ if (metaFrame.hasSpace()) {
+ metaFrame.addFreePage(freePage);
+ } else {
+ // allocate a new page in the chain of meta pages
+ int newPage = metaFrame.getFreePage();
+ if (newPage < 0) {
+ throw new Exception("Inconsistent Meta Page State. It has no space, but it also has no entries.");
+ }
- ICachedPage newNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, newPage), false);
- newNode.acquireWriteLatch();
+ ICachedPage newNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newPage), false);
+ newNode.acquireWriteLatch();
- try {
- int metaMaxPage = metaFrame.getMaxPage();
+ try {
+ int metaMaxPage = metaFrame.getMaxPage();
- // copy metaDataPage to newNode
- System.arraycopy(metaNode.getBuffer().array(), 0, newNode
- .getBuffer().array(), 0, metaNode.getBuffer()
- .capacity());
+ // copy metaDataPage to newNode
+ System.arraycopy(metaNode.getBuffer().array(), 0, newNode.getBuffer().array(), 0, metaNode
+ .getBuffer().capacity());
- metaFrame.initBuffer(META_PAGE_LEVEL_INDICATOR);
- metaFrame.setNextPage(newPage);
- metaFrame.setMaxPage(metaMaxPage);
- metaFrame.addFreePage(freePage);
- } finally {
- newNode.releaseWriteLatch();
- bufferCache.unpin(newNode);
- }
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- metaNode.releaseWriteLatch();
- bufferCache.unpin(metaNode);
- }
- }
+ metaFrame.initBuffer(META_PAGE_LEVEL_INDICATOR);
+ metaFrame.setNextPage(newPage);
+ metaFrame.setMaxPage(metaMaxPage);
+ metaFrame.addFreePage(freePage);
+ } finally {
+ newNode.releaseWriteLatch();
+ bufferCache.unpin(newNode);
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ metaNode.releaseWriteLatch();
+ bufferCache.unpin(metaNode);
+ }
+ }
- @Override
- public int getFreePage(ITreeIndexMetaDataFrame metaFrame)
- throws HyracksDataException {
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, headPage), false);
+ @Override
+ public int getFreePage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+ ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), false);
- metaNode.acquireWriteLatch();
+ metaNode.acquireWriteLatch();
- int freePage = -1;
- try {
- metaFrame.setPage(metaNode);
- freePage = metaFrame.getFreePage();
- if (freePage < 0) { // no free page entry on this page
- int nextPage = metaFrame.getNextPage();
- if (nextPage > 0) { // sibling may have free pages
- ICachedPage nextNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, nextPage), false);
+ int freePage = -1;
+ try {
+ metaFrame.setPage(metaNode);
+ freePage = metaFrame.getFreePage();
+ if (freePage < 0) { // no free page entry on this page
+ int nextPage = metaFrame.getNextPage();
+ if (nextPage > 0) { // sibling may have free pages
+ ICachedPage nextNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, nextPage), false);
- nextNode.acquireWriteLatch();
- // we copy over the free space entries of nextpage into the
- // first meta page (metaDataPage)
- // we need to link the first page properly to the next page
- // of nextpage
- try {
- // remember entries that remain unchanged
- int maxPage = metaFrame.getMaxPage();
+ nextNode.acquireWriteLatch();
+ // we copy over the free space entries of nextpage into the
+ // first meta page (metaDataPage)
+ // we need to link the first page properly to the next page
+ // of nextpage
+ try {
+ // remember entries that remain unchanged
+ int maxPage = metaFrame.getMaxPage();
- // copy entire page (including sibling pointer, free
- // page entries, and all other info)
- // after this copy nextPage is considered a free page
- System.arraycopy(nextNode.getBuffer().array(), 0,
- metaNode.getBuffer().array(), 0, nextNode
- .getBuffer().capacity());
+ // copy entire page (including sibling pointer, free
+ // page entries, and all other info)
+ // after this copy nextPage is considered a free page
+ System.arraycopy(nextNode.getBuffer().array(), 0, metaNode.getBuffer().array(), 0, nextNode
+ .getBuffer().capacity());
- // reset unchanged entry
- metaFrame.setMaxPage(maxPage);
+ // reset unchanged entry
+ metaFrame.setMaxPage(maxPage);
- freePage = metaFrame.getFreePage();
- // sibling also has no free pages, this "should" not
- // happen, but we deal with it anyway just to be safe
- if (freePage < 0) {
- freePage = nextPage;
- } else {
- metaFrame.addFreePage(nextPage);
- }
- } finally {
- nextNode.releaseWriteLatch();
- bufferCache.unpin(nextNode);
- }
- } else {
- freePage = metaFrame.getMaxPage();
- freePage++;
- metaFrame.setMaxPage(freePage);
- }
- }
- } finally {
- metaNode.releaseWriteLatch();
- bufferCache.unpin(metaNode);
- }
-
- return freePage;
- }
+ freePage = metaFrame.getFreePage();
+ // sibling also has no free pages, this "should" not
+ // happen, but we deal with it anyway just to be safe
+ if (freePage < 0) {
+ freePage = nextPage;
+ } else {
+ metaFrame.addFreePage(nextPage);
+ }
+ } finally {
+ nextNode.releaseWriteLatch();
+ bufferCache.unpin(nextNode);
+ }
+ } else {
+ freePage = metaFrame.getMaxPage();
+ freePage++;
+ metaFrame.setMaxPage(freePage);
+ }
+ }
+ } finally {
+ metaNode.releaseWriteLatch();
+ bufferCache.unpin(metaNode);
+ }
- @Override
- public int getMaxPage(ITreeIndexMetaDataFrame metaFrame)
- throws HyracksDataException {
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, headPage), false);
- metaNode.acquireWriteLatch();
- int maxPage = -1;
- try {
- metaFrame.setPage(metaNode);
- maxPage = metaFrame.getMaxPage();
- } finally {
- metaNode.releaseWriteLatch();
- bufferCache.unpin(metaNode);
- }
- return maxPage;
- }
+ return freePage;
+ }
- @Override
- public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage)
- throws HyracksDataException {
- // initialize meta data page
- ICachedPage metaNode = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, headPage), true);
+ @Override
+ public int getMaxPage(ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
+ ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), false);
+ metaNode.acquireWriteLatch();
+ int maxPage = -1;
+ try {
+ metaFrame.setPage(metaNode);
+ maxPage = metaFrame.getMaxPage();
+ } finally {
+ metaNode.releaseWriteLatch();
+ bufferCache.unpin(metaNode);
+ }
+ return maxPage;
+ }
- metaNode.acquireWriteLatch();
- try {
- metaFrame.setPage(metaNode);
- metaFrame.initBuffer(META_PAGE_LEVEL_INDICATOR);
- metaFrame.setMaxPage(currentMaxPage);
- } finally {
- metaNode.releaseWriteLatch();
- bufferCache.unpin(metaNode);
- }
- }
+ @Override
+ public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage) throws HyracksDataException {
+ // initialize meta data page
+ ICachedPage metaNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, headPage), true);
- @Override
- public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory() {
- return metaDataFrameFactory;
- }
+ metaNode.acquireWriteLatch();
+ try {
+ metaFrame.setPage(metaNode);
+ metaFrame.initBuffer(META_PAGE_LEVEL_INDICATOR);
+ metaFrame.setMaxPage(currentMaxPage);
+ } finally {
+ metaNode.releaseWriteLatch();
+ bufferCache.unpin(metaNode);
+ }
+ }
- @Override
- public byte getFreePageLevelIndicator() {
- return FREE_PAGE_LEVEL_INDICATOR;
- }
+ @Override
+ public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory() {
+ return metaDataFrameFactory;
+ }
- @Override
- public byte getMetaPageLevelIndicator() {
- return META_PAGE_LEVEL_INDICATOR;
- }
+ @Override
+ public byte getFreePageLevelIndicator() {
+ return FREE_PAGE_LEVEL_INDICATOR;
+ }
- @Override
- public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame) {
- return metaFrame.getLevel() == FREE_PAGE_LEVEL_INDICATOR;
- }
+ @Override
+ public byte getMetaPageLevelIndicator() {
+ return META_PAGE_LEVEL_INDICATOR;
+ }
- @Override
- public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame) {
- return metaFrame.getLevel() == META_PAGE_LEVEL_INDICATOR;
- }
+ @Override
+ public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame) {
+ return metaFrame.getLevel() == FREE_PAGE_LEVEL_INDICATOR;
+ }
+
+ @Override
+ public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame) {
+ return metaFrame.getLevel() == META_PAGE_LEVEL_INDICATOR;
+ }
}
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeDiskOrderScanCursor.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
similarity index 83%
rename from hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeDiskOrderScanCursor.java
rename to hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
index 5b69991..2c2cb5e 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTreeDiskOrderScanCursor.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/impls/TreeDiskOrderScanCursor.java
@@ -13,36 +13,32 @@
* limitations under the License.
*/
-package edu.uci.ics.hyracks.storage.am.btree.impls;
+package edu.uci.ics.hyracks.storage.am.common.impls;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-public class BTreeDiskOrderScanCursor implements ITreeIndexCursor {
-
- // TODO: might want to return tuples in physical order, not logical order to
- // speed up access
+public class TreeDiskOrderScanCursor implements ITreeIndexCursor {
private int tupleIndex = 0;
private int fileId = -1;
int currentPageId = -1;
- int maxPageId = -1; // TODO: figure out how to scan to the end of file, this
- // is dirty and may not with concurrent updates
+ int maxPageId = -1;
private ICachedPage page = null;
- private IBTreeLeafFrame frame = null;
+ private ITreeIndexFrame frame = null;
private IBufferCache bufferCache = null;
private ITreeIndexTupleReference frameTuple;
- public BTreeDiskOrderScanCursor(IBTreeLeafFrame frame) {
+ public TreeDiskOrderScanCursor(ITreeIndexFrame frame) {
this.frame = frame;
this.frameTuple = frame.getTupleWriter().createTupleReference();
}
@@ -114,16 +110,15 @@
bufferCache.unpin(page);
}
- page = ((CursorInitialState) initialState).getPage();
+ page = initialState.getPage();
tupleIndex = 0;
frame.setPage(page);
- RangePredicate pred = (RangePredicate) searchPred;
- MultiComparator lowKeyCmp = pred.getLowKeyComparator();
+ MultiComparator lowKeyCmp = searchPred.getLowKeyComparator();
frameTuple.setFieldCount(lowKeyCmp.getFieldCount());
boolean leafExists = positionToNextLeaf(false);
if (!leafExists) {
throw new HyracksDataException(
- "Failed to open disk-order scan cursor for B-tree. Traget B-tree has no leaves.");
+ "Failed to open disk-order scan cursor for tree index. Traget tree index has no leaves.");
}
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java
index 9122174..4f6e656 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IndexOpContext.java
@@ -1,5 +1,5 @@
package edu.uci.ics.hyracks.storage.am.common.ophelpers;
public interface IndexOpContext {
- void reset();
+ void reset();
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
index 8902713..46551dd 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
@@ -31,7 +31,7 @@
public int size() {
return size;
}
-
+
public int first() {
return first;
}
@@ -69,11 +69,11 @@
public int getFirst() {
return data[first];
}
-
+
public void moveFirst() {
first++;
}
-
+
public void clear() {
size = 0;
first = 0;
@@ -82,7 +82,7 @@
public boolean isLast() {
return size == first;
}
-
+
public boolean isEmpty() {
return size == 0;
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
index 6524eb7..1842bf8 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/MultiComparator.java
@@ -32,7 +32,7 @@
private IBinaryComparator[] cmps = null;
private ITypeTrait[] typeTraits;
-
+
private IBinaryComparator intCmp = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
public IBinaryComparator getIntCmp() {
@@ -46,8 +46,8 @@
public int compare(ITupleReference tupleA, ITupleReference tupleB) {
for (int i = 0; i < cmps.length; i++) {
- int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i), tupleB
- .getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
+ int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i),
+ tupleB.getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
if (cmp < 0)
return -1;
else if (cmp > 0)
@@ -58,8 +58,8 @@
public int fieldRangeCompare(ITupleReference tupleA, ITupleReference tupleB, int startFieldIndex, int numFields) {
for (int i = startFieldIndex; i < startFieldIndex + numFields; i++) {
- int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i), tupleB
- .getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
+ int cmp = cmps[i].compare(tupleA.getFieldData(i), tupleA.getFieldStart(i), tupleA.getFieldLength(i),
+ tupleB.getFieldData(i), tupleB.getFieldStart(i), tupleB.getFieldLength(i));
if (cmp < 0)
return -1;
else if (cmp > 0)
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
index 3ec7ae5..1730c4a 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/SimpleTupleWriter.java
@@ -58,8 +58,8 @@
int fieldEndOff = 0;
for (int i = 0; i < tuple.getFieldCount(); i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
- .getFieldLength(i));
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+ tuple.getFieldLength(i));
fieldEndOff += tuple.getFieldLength(i);
runner += tuple.getFieldLength(i);
targetBuf.putShort(targetOff + nullFlagsBytes + i * 2, (short) fieldEndOff);
@@ -81,8 +81,8 @@
int fieldEndOff = 0;
int fieldCounter = 0;
for (int i = startField; i < startField + numFields; i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
- .getFieldLength(i));
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+ tuple.getFieldLength(i));
fieldEndOff += tuple.getFieldLength(i);
runner += tuple.getFieldLength(i);
targetBuf.putShort(targetOff + nullFlagsBytes + fieldCounter * 2, (short) fieldEndOff);
@@ -99,7 +99,7 @@
protected int getFieldSlotsBytes(ITupleReference tuple) {
return tuple.getFieldCount() * 2;
}
-
+
protected int getNullFlagsBytes(ITupleReference tuple, int startField, int numFields) {
return (int) Math.ceil((double) numFields / 8.0);
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
index cc03d21..81b48e5 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/tuples/TypeAwareTupleWriter.java
@@ -74,8 +74,8 @@
// write data fields
for (int i = 0; i < tuple.getFieldCount(); i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
- .getFieldLength(i));
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+ tuple.getFieldLength(i));
runner += tuple.getFieldLength(i);
}
@@ -102,8 +102,8 @@
runner = encDec.getPos();
for (int i = startField; i < startField + numFields; i++) {
- System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner, tuple
- .getFieldLength(i));
+ System.arraycopy(tuple.getFieldData(i), tuple.getFieldStart(i), targetBuf.array(), runner,
+ tuple.getFieldLength(i));
runner += tuple.getFieldLength(i);
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java
index 3e7fed9..eb261da 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexBufferCacheWarmup.java
@@ -13,74 +13,72 @@
import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
public class TreeIndexBufferCacheWarmup {
- private final IBufferCache bufferCache;
- private final IFreePageManager freePageManager;
- private final int fileId;
- private final ArrayList<IntArrayList> pagesByLevel = new ArrayList<IntArrayList>();
- private final Random rnd = new Random();
-
- public TreeIndexBufferCacheWarmup(IBufferCache bufferCache,
- IFreePageManager freePageManager, int fileId) {
- this.bufferCache = bufferCache;
- this.freePageManager = freePageManager;
- this.fileId = fileId;
- }
-
- public void warmup(ITreeIndexFrame frame, ITreeIndexMetaDataFrame metaFrame, int[] warmupTreeLevels, int[] warmupRepeats) throws HyracksDataException {
- bufferCache.openFile(fileId);
+ private final IBufferCache bufferCache;
+ private final IFreePageManager freePageManager;
+ private final int fileId;
+ private final ArrayList<IntArrayList> pagesByLevel = new ArrayList<IntArrayList>();
+ private final Random rnd = new Random();
- // scan entire file to determine pages in each level
- int maxPageId = freePageManager.getMaxPage(metaFrame);
- for (int pageId = 0; pageId <= maxPageId; pageId++) {
- ICachedPage page = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, pageId), false);
- page.acquireReadLatch();
- try {
- frame.setPage(page);
- byte level = frame.getLevel();
- while(level >= pagesByLevel.size()) {
- pagesByLevel.add(new IntArrayList(100, 100));
- }
- if(level >= 0) {
- //System.out.println("ADDING: " + level + " " + pageId);
- pagesByLevel.get(level).add(pageId);
- }
- } finally {
- page.releaseReadLatch();
- bufferCache.unpin(page);
- }
- }
-
- // pin certain pages again to simulate frequent access
- for(int i = 0; i < warmupTreeLevels.length; i++) {
- if(warmupTreeLevels[i] < pagesByLevel.size()) {
- int repeats = warmupRepeats[i];
- IntArrayList pageIds = pagesByLevel.get(warmupTreeLevels[i]);
- int[] remainingPageIds = new int[pageIds.size()];
- for(int r = 0; r < repeats; r++) {
- for(int j = 0; j < pageIds.size(); j++) {
- remainingPageIds[j] = pageIds.get(j);
- }
-
- int remainingLength = pageIds.size();
- for(int j = 0; j < pageIds.size(); j++) {
- int index = Math.abs(rnd.nextInt()) % remainingLength;
- int pageId = remainingPageIds[index];
-
- // pin & latch then immediately unlatch & unpin
- ICachedPage page = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, pageId), false);
- page.acquireReadLatch();
- page.releaseReadLatch();
- bufferCache.unpin(page);
+ public TreeIndexBufferCacheWarmup(IBufferCache bufferCache, IFreePageManager freePageManager, int fileId) {
+ this.bufferCache = bufferCache;
+ this.freePageManager = freePageManager;
+ this.fileId = fileId;
+ }
- remainingPageIds[index] = remainingPageIds[remainingLength-1];
- remainingLength--;
- }
- }
- }
- }
-
- bufferCache.closeFile(fileId);
- }
+ public void warmup(ITreeIndexFrame frame, ITreeIndexMetaDataFrame metaFrame, int[] warmupTreeLevels,
+ int[] warmupRepeats) throws HyracksDataException {
+ bufferCache.openFile(fileId);
+
+ // scan entire file to determine pages in each level
+ int maxPageId = freePageManager.getMaxPage(metaFrame);
+ for (int pageId = 0; pageId <= maxPageId; pageId++) {
+ ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ page.acquireReadLatch();
+ try {
+ frame.setPage(page);
+ byte level = frame.getLevel();
+ while (level >= pagesByLevel.size()) {
+ pagesByLevel.add(new IntArrayList(100, 100));
+ }
+ if (level >= 0) {
+ // System.out.println("ADDING: " + level + " " + pageId);
+ pagesByLevel.get(level).add(pageId);
+ }
+ } finally {
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+ }
+ }
+
+ // pin certain pages again to simulate frequent access
+ for (int i = 0; i < warmupTreeLevels.length; i++) {
+ if (warmupTreeLevels[i] < pagesByLevel.size()) {
+ int repeats = warmupRepeats[i];
+ IntArrayList pageIds = pagesByLevel.get(warmupTreeLevels[i]);
+ int[] remainingPageIds = new int[pageIds.size()];
+ for (int r = 0; r < repeats; r++) {
+ for (int j = 0; j < pageIds.size(); j++) {
+ remainingPageIds[j] = pageIds.get(j);
+ }
+
+ int remainingLength = pageIds.size();
+ for (int j = 0; j < pageIds.size(); j++) {
+ int index = Math.abs(rnd.nextInt()) % remainingLength;
+ int pageId = remainingPageIds[index];
+
+ // pin & latch then immediately unlatch & unpin
+ ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ page.acquireReadLatch();
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+
+ remainingPageIds[index] = remainingPageIds[remainingLength - 1];
+ remainingLength--;
+ }
+ }
+ }
+ }
+
+ bufferCache.closeFile(fileId);
+ }
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java
index ad7166e..5b01b2d 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStats.java
@@ -2,136 +2,133 @@
import java.text.DecimalFormat;
-import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
-import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
public class TreeIndexStats {
-
- private TreeIndexNodeTypeStats rootStats = new TreeIndexNodeTypeStats();
- private TreeIndexNodeTypeStats interiorStats = new TreeIndexNodeTypeStats();
- private TreeIndexNodeTypeStats leafStats = new TreeIndexNodeTypeStats();
-
- private int freePages = 0;
- private int metaPages = 0;
- private int treeLevels = 0;
-
- public void begin() {
- rootStats.clear();
- interiorStats.clear();
- leafStats.clear();
- freePages = 0;
- metaPages = 0;
- treeLevels = 0;
- }
-
- public void addRoot(ITreeIndexFrame frame) {
- treeLevels = frame.getLevel() + 1;
- rootStats.add(frame);
- }
-
- public void add(ITreeIndexFrame frame) {
- if(frame.isLeaf()) {
- leafStats.add(frame);
- } else if(frame.isInterior()) {
- interiorStats.add(frame);
- }
- }
-
- public void add(ITreeIndexMetaDataFrame metaFrame, IFreePageManager freePageManager) {
- if(freePageManager.isFreePage(metaFrame)) {
- freePages++;
- } else if(freePageManager.isMetaPage(metaFrame)) {
- metaPages++;
- }
- }
-
- public void end() {
- // nothing here currently
- }
-
- @Override
- public String toString() {
- StringBuilder strBuilder = new StringBuilder();
- DecimalFormat df = new DecimalFormat("#####.##");
-
- strBuilder.append("TREE LEVELS: " + treeLevels + "\n");
- strBuilder.append("FREE PAGES : " + freePages + "\n");
- strBuilder.append("META PAGES : " + metaPages + "\n");
- long totalPages = interiorStats.getNumPages() + leafStats.getNumPages() + freePages + metaPages;
- strBuilder.append("TOTAL PAGES : " + totalPages + "\n");
-
- strBuilder.append("\n");
- strBuilder.append("ROOT STATS" + "\n");
- strBuilder.append("NUM TUPLES: " + rootStats.getNumTuples() + "\n");
- strBuilder.append("FILL FACTOR : " + df.format(rootStats.getAvgFillFactor()) + "\n");
-
- if(interiorStats.getNumPages() > 0) {
- strBuilder.append("\n");
- strBuilder.append("INTERIOR STATS" + "\n");
- strBuilder.append("NUM PAGES: " + interiorStats.getNumPages() + "\n");
- strBuilder.append("NUM TUPLES: " + interiorStats.getNumTuples() + "\n");
- strBuilder.append("AVG TUPLES/PAGE: " + df.format(interiorStats.getAvgNumTuples()) + "\n");
- strBuilder.append("AVG FILL FACTOR: " + df.format(interiorStats.getAvgFillFactor()) + "\n");
- }
- if(leafStats.getNumPages() > 0) {
- strBuilder.append("\n");
- strBuilder.append("LEAF STATS" + "\n");
- strBuilder.append("NUM PAGES: " + df.format(leafStats.getNumPages()) + "\n");
- strBuilder.append("NUM TUPLES: " + df.format(leafStats.getNumTuples()) + "\n");
- strBuilder.append("AVG TUPLES/PAGE: " + df.format(leafStats.getAvgNumTuples()) + "\n");
- strBuilder.append("AVG FILL FACTOR: " + df.format(leafStats.getAvgFillFactor()) + "\n");
- }
-
- return strBuilder.toString();
- }
-
- public class TreeIndexNodeTypeStats {
- private long numTuples;
- private long sumTuplesSizes;
- private long numPages;
- private double sumFillFactors;
-
- public void clear() {
- numTuples = 0;
- sumTuplesSizes = 0;
- numPages = 0;
- }
-
- public void add(ITreeIndexFrame frame) {
- numPages++;
- numTuples += frame.getTupleCount();
- sumFillFactors += (double)(frame.getBuffer().capacity() - frame.getTotalFreeSpace()) / (double)frame.getBuffer().capacity();
- }
-
- public long getNumTuples() {
- return numTuples;
- }
-
- public long getSumTupleSizes() {
- return sumTuplesSizes;
- }
-
- public long getNumPages() {
- return numPages;
- }
-
- public double getAvgNumTuples() {
- return (double)numTuples / (double)numPages;
- }
-
- public double getAvgTupleSize() {
- return (double)sumTuplesSizes / (double)numTuples;
- }
-
- public double getAvgFillFactor() {
- return sumFillFactors / numPages;
- }
- }
-
-
+ private TreeIndexNodeTypeStats rootStats = new TreeIndexNodeTypeStats();
+ private TreeIndexNodeTypeStats interiorStats = new TreeIndexNodeTypeStats();
+ private TreeIndexNodeTypeStats leafStats = new TreeIndexNodeTypeStats();
+
+ private int freePages = 0;
+ private int metaPages = 0;
+ private int treeLevels = 0;
+
+ public void begin() {
+ rootStats.clear();
+ interiorStats.clear();
+ leafStats.clear();
+ freePages = 0;
+ metaPages = 0;
+ treeLevels = 0;
+ }
+
+ public void addRoot(ITreeIndexFrame frame) {
+ treeLevels = frame.getLevel() + 1;
+ rootStats.add(frame);
+ }
+
+ public void add(ITreeIndexFrame frame) {
+ if (frame.isLeaf()) {
+ leafStats.add(frame);
+ } else if (frame.isInterior()) {
+ interiorStats.add(frame);
+ }
+ }
+
+ public void add(ITreeIndexMetaDataFrame metaFrame, IFreePageManager freePageManager) {
+ if (freePageManager.isFreePage(metaFrame)) {
+ freePages++;
+ } else if (freePageManager.isMetaPage(metaFrame)) {
+ metaPages++;
+ }
+ }
+
+ public void end() {
+ // nothing here currently
+ }
+
+ @Override
+ public String toString() {
+ StringBuilder strBuilder = new StringBuilder();
+ DecimalFormat df = new DecimalFormat("#####.##");
+
+ strBuilder.append("TREE LEVELS: " + treeLevels + "\n");
+ strBuilder.append("FREE PAGES : " + freePages + "\n");
+ strBuilder.append("META PAGES : " + metaPages + "\n");
+ long totalPages = interiorStats.getNumPages() + leafStats.getNumPages() + freePages + metaPages;
+ strBuilder.append("TOTAL PAGES : " + totalPages + "\n");
+
+ strBuilder.append("\n");
+ strBuilder.append("ROOT STATS" + "\n");
+ strBuilder.append("NUM TUPLES: " + rootStats.getNumTuples() + "\n");
+ strBuilder.append("FILL FACTOR : " + df.format(rootStats.getAvgFillFactor()) + "\n");
+
+ if (interiorStats.getNumPages() > 0) {
+ strBuilder.append("\n");
+ strBuilder.append("INTERIOR STATS" + "\n");
+ strBuilder.append("NUM PAGES: " + interiorStats.getNumPages() + "\n");
+ strBuilder.append("NUM TUPLES: " + interiorStats.getNumTuples() + "\n");
+ strBuilder.append("AVG TUPLES/PAGE: " + df.format(interiorStats.getAvgNumTuples()) + "\n");
+ strBuilder.append("AVG FILL FACTOR: " + df.format(interiorStats.getAvgFillFactor()) + "\n");
+ }
+
+ if (leafStats.getNumPages() > 0) {
+ strBuilder.append("\n");
+ strBuilder.append("LEAF STATS" + "\n");
+ strBuilder.append("NUM PAGES: " + df.format(leafStats.getNumPages()) + "\n");
+ strBuilder.append("NUM TUPLES: " + df.format(leafStats.getNumTuples()) + "\n");
+ strBuilder.append("AVG TUPLES/PAGE: " + df.format(leafStats.getAvgNumTuples()) + "\n");
+ strBuilder.append("AVG FILL FACTOR: " + df.format(leafStats.getAvgFillFactor()) + "\n");
+ }
+
+ return strBuilder.toString();
+ }
+
+ public class TreeIndexNodeTypeStats {
+ private long numTuples;
+ private long sumTuplesSizes;
+ private long numPages;
+ private double sumFillFactors;
+
+ public void clear() {
+ numTuples = 0;
+ sumTuplesSizes = 0;
+ numPages = 0;
+ }
+
+ public void add(ITreeIndexFrame frame) {
+ numPages++;
+ numTuples += frame.getTupleCount();
+ sumFillFactors += (double) (frame.getBuffer().capacity() - frame.getTotalFreeSpace())
+ / (double) frame.getBuffer().capacity();
+ }
+
+ public long getNumTuples() {
+ return numTuples;
+ }
+
+ public long getSumTupleSizes() {
+ return sumTuplesSizes;
+ }
+
+ public long getNumPages() {
+ return numPages;
+ }
+
+ public double getAvgNumTuples() {
+ return (double) numTuples / (double) numPages;
+ }
+
+ public double getAvgTupleSize() {
+ return (double) sumTuplesSizes / (double) numTuples;
+ }
+
+ public double getAvgFillFactor() {
+ return sumFillFactors / numPages;
+ }
+ }
+
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java
index f05bc39..fc0ab5e 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/utility/TreeIndexStatsGatherer.java
@@ -10,66 +10,61 @@
public class TreeIndexStatsGatherer {
- private final TreeIndexStats treeIndexStats = new TreeIndexStats();
- private final IBufferCache bufferCache;
- private final IFreePageManager freePageManager;
- private final int fileId;
- private final int rootPage;
+ private final TreeIndexStats treeIndexStats = new TreeIndexStats();
+ private final IBufferCache bufferCache;
+ private final IFreePageManager freePageManager;
+ private final int fileId;
+ private final int rootPage;
- public TreeIndexStatsGatherer(IBufferCache bufferCache,
- IFreePageManager freePageManager, int fileId, int rootPage) {
- this.bufferCache = bufferCache;
- this.freePageManager = freePageManager;
- this.fileId = fileId;
- this.rootPage = rootPage;
- }
-
- public TreeIndexStats gatherStats(ITreeIndexFrame leafFrame,
- ITreeIndexFrame interiorFrame, ITreeIndexMetaDataFrame metaFrame)
- throws HyracksDataException {
+ public TreeIndexStatsGatherer(IBufferCache bufferCache, IFreePageManager freePageManager, int fileId, int rootPage) {
+ this.bufferCache = bufferCache;
+ this.freePageManager = freePageManager;
+ this.fileId = fileId;
+ this.rootPage = rootPage;
+ }
- bufferCache.openFile(fileId);
+ public TreeIndexStats gatherStats(ITreeIndexFrame leafFrame, ITreeIndexFrame interiorFrame,
+ ITreeIndexMetaDataFrame metaFrame) throws HyracksDataException {
- treeIndexStats.begin();
+ bufferCache.openFile(fileId);
- int maxPageId = freePageManager.getMaxPage(metaFrame);
- for (int pageId = 0; pageId <= maxPageId; pageId++) {
- ICachedPage page = bufferCache.pin(BufferedFileHandle
- .getDiskPageId(fileId, pageId), false);
- page.acquireReadLatch();
- try {
- metaFrame.setPage(page);
- leafFrame.setPage(page);
- interiorFrame.setPage(page);
-
- if (leafFrame.isLeaf()) {
- if (pageId == rootPage) {
- treeIndexStats.addRoot(leafFrame);
- }
- else {
- treeIndexStats.add(leafFrame);
- }
- } else if (interiorFrame.isInterior()) {
- if(pageId == rootPage) {
- treeIndexStats.addRoot(interiorFrame);
- }
- else {
- treeIndexStats.add(interiorFrame);
- }
- } else {
- treeIndexStats.add(metaFrame, freePageManager);
- }
+ treeIndexStats.begin();
- } finally {
- page.releaseReadLatch();
- bufferCache.unpin(page);
- }
- }
+ int maxPageId = freePageManager.getMaxPage(metaFrame);
+ for (int pageId = 0; pageId <= maxPageId; pageId++) {
+ ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+ page.acquireReadLatch();
+ try {
+ metaFrame.setPage(page);
+ leafFrame.setPage(page);
+ interiorFrame.setPage(page);
- treeIndexStats.end();
+ if (leafFrame.isLeaf()) {
+ if (pageId == rootPage) {
+ treeIndexStats.addRoot(leafFrame);
+ } else {
+ treeIndexStats.add(leafFrame);
+ }
+ } else if (interiorFrame.isInterior()) {
+ if (pageId == rootPage) {
+ treeIndexStats.addRoot(interiorFrame);
+ } else {
+ treeIndexStats.add(interiorFrame);
+ }
+ } else {
+ treeIndexStats.add(metaFrame, freePageManager);
+ }
- bufferCache.closeFile(fileId);
+ } finally {
+ page.releaseReadLatch();
+ bufferCache.unpin(page);
+ }
+ }
- return treeIndexStats;
- }
+ treeIndexStats.end();
+
+ bufferCache.closeFile(fileId);
+
+ return treeIndexStats;
+ }
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
index 45ae3a1..ebc17b5 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.api;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -17,7 +32,7 @@
public void computeMBR(ISplitKey splitKey, MultiComparator cmp);
public void insert(ITupleReference tuple, MultiComparator cmp, int tupleIndex) throws Exception;
-
+
public void delete(int tupleIndex, MultiComparator cmp) throws Exception;
public int getPageNsn();
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
index a59e411..824c6b0 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
@@ -1,8 +1,23 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.api;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.TraverseList;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.PathList;
public interface IRTreeInteriorFrame extends IRTreeFrame {
@@ -14,7 +29,7 @@
public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp);
- public int findTupleByPointer(ITupleReference tuple, TraverseList traverseList, int parentId, MultiComparator cmp);
+ public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentId, MultiComparator cmp);
public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
index 7d66ade..c85712d 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeLeafFrame.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.api;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java
index bb5d9cd..5b8931d 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelper.java
@@ -1,11 +1,24 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptorHelper;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
@@ -34,8 +47,4 @@
return new RTree(bufferCache, freePageManager, opDesc.getTreeIndexInteriorFactory(),
opDesc.getTreeIndexLeafFactory(), cmp);
}
-
- public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame) throws HyracksDataException {
- throw new HyracksDataException("createDiskOrderScanCursor Operation not implemented.");
- }
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelperFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelperFactory.java
index 59030f5..b26f21a 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelperFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeOpHelperFactory.java
@@ -1,27 +1,33 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOpHelperFactory;
import edu.uci.ics.hyracks.storage.am.common.dataflow.ITreeIndexOperatorDescriptorHelper;
import edu.uci.ics.hyracks.storage.am.common.dataflow.IndexHelperOpenMode;
import edu.uci.ics.hyracks.storage.am.common.dataflow.TreeIndexOpHelper;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeDiskOrderScanCursor;
public class RTreeOpHelperFactory implements ITreeIndexOpHelperFactory {
-
+
private static final long serialVersionUID = 1L;
@Override
public TreeIndexOpHelper createTreeIndexOpHelper(ITreeIndexOperatorDescriptorHelper opDesc,
- IHyracksStageletContext ctx, int partition, IndexHelperOpenMode mode) {
+ IHyracksStageletContext ctx, int partition, IndexHelperOpenMode mode) {
return new RTreeOpHelper(opDesc, ctx, partition, mode);
}
-
- public ITreeIndexCursor createDiskOrderScanCursor(ITreeIndexFrame leafFrame) throws HyracksDataException {
- return new RTreeDiskOrderScanCursor((IRTreeFrame)leafFrame);
- }
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
index 5bb0056..8aab763 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorDescriptor.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
index a98157e..6a13d9f 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/dataflow/RTreeSearchOperatorNodePushable.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.dataflow;
import java.io.DataOutput;
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMInteriorFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMInteriorFrameFactory.java
deleted file mode 100644
index 690592f..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMInteriorFrameFactory.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.rtree.frames;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
-
-public class NSMInteriorFrameFactory implements ITreeIndexFrameFactory {
-
- private static final long serialVersionUID = 1L;
- private ITreeIndexTupleWriterFactory tupleWriterFactory;
- private int keyFieldCount;
-
- public NSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
- this.tupleWriterFactory = tupleWriterFactory;
- this.keyFieldCount = keyFieldCount;
- }
-
- @Override
- public IRTreeInteriorFrame createFrame() {
- return new NSMInteriorFrame(tupleWriterFactory.createTupleWriter(), keyFieldCount);
- }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMLeafFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMLeafFrameFactory.java
deleted file mode 100644
index 01811d2..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMLeafFrameFactory.java
+++ /dev/null
@@ -1,22 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.rtree.frames;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-
-public class NSMLeafFrameFactory implements ITreeIndexFrameFactory {
-
- private static final long serialVersionUID = 1L;
- private ITreeIndexTupleWriterFactory tupleWriterFactory;
- private int keyFieldCount;
-
- public NSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
- this.tupleWriterFactory = tupleWriterFactory;
- this.keyFieldCount = keyFieldCount;
- }
-
- @Override
- public IRTreeLeafFrame createFrame() {
- return new NSMLeafFrame(tupleWriterFactory.createTupleWriter(), keyFieldCount);
- }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
similarity index 82%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMFrame.java
rename to hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
index 9edb815..11142b4 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.frames;
import java.util.ArrayList;
@@ -13,43 +28,56 @@
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
import edu.uci.ics.hyracks.storage.am.rtree.impls.Rectangle;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SpatialUtils;
import edu.uci.ics.hyracks.storage.am.rtree.impls.TupleEntryArrayList;
import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
-public abstract class NSMFrame extends TreeIndexNSMFrame implements IRTreeFrame {
+public abstract class RTreeNSMFrame extends TreeIndexNSMFrame implements IRTreeFrame {
protected static final int pageNsnOff = smFlagOff + 1;
protected static final int rightPageOff = pageNsnOff + 4;
protected ITreeIndexTupleReference[] tuples;
protected ITreeIndexTupleReference cmpFrameTuple;
- protected final SpatialUtils spatialUtils;
protected TupleEntryArrayList tupleEntries1; // used for split and checking
// enlargement
protected TupleEntryArrayList tupleEntries2; // used for split
+
protected Rectangle[] rec;
protected static final double splitFactor = 0.4;
protected static final int nearMinimumOverlapFactor = 32;
+ private static final double doubleEpsilon = computeDoubleEpsilon();
+ private static final int numTuplesEntries = 100;
- public NSMFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
+ public RTreeNSMFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
super(tupleWriter, new UnorderedSlotManager());
this.tuples = new ITreeIndexTupleReference[keyFieldCount];
for (int i = 0; i < keyFieldCount; i++) {
this.tuples[i] = tupleWriter.createTupleReference();
}
cmpFrameTuple = tupleWriter.createTupleReference();
- spatialUtils = new SpatialUtils();
- // TODO: find a better way to know number of entries per node
- tupleEntries1 = new TupleEntryArrayList(100, 100, spatialUtils);
- tupleEntries2 = new TupleEntryArrayList(100, 100, spatialUtils);
+
+ tupleEntries1 = new TupleEntryArrayList(numTuplesEntries, numTuplesEntries);
+ tupleEntries2 = new TupleEntryArrayList(numTuplesEntries, numTuplesEntries);
rec = new Rectangle[4];
for (int i = 0; i < 4; i++) {
rec[i] = new Rectangle(keyFieldCount / 2);
}
}
+ private static double computeDoubleEpsilon() {
+ double doubleEpsilon = 1.0;
+
+ do {
+ doubleEpsilon /= 2.0;
+ } while (1.0 + (doubleEpsilon / 2.0) != 1.0);
+ return doubleEpsilon;
+ }
+
+ public static double doubleEpsilon() {
+ return doubleEpsilon;
+ }
+
@Override
public void initBuffer(byte level) {
super.initBuffer(level);
@@ -130,11 +158,7 @@
@Override
public void insertSorted(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
- try {
- insert(tuple, cmp, -1);
- } catch (Exception e) {
- e.printStackTrace();
- }
+
}
@Override
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
similarity index 95%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMInteriorFrame.java
rename to hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
index 7cd8bc3..d03c5e9 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMInteriorFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.frames;
import java.io.ByteArrayInputStream;
@@ -21,16 +36,16 @@
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
import edu.uci.ics.hyracks.storage.am.rtree.impls.EntriesOrder;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.PathList;
import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.TraverseList;
import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
-public class NSMInteriorFrame extends NSMFrame implements IRTreeInteriorFrame {
+public class RTreeNSMInteriorFrame extends RTreeNSMFrame implements IRTreeInteriorFrame {
private static final int childPtrSize = 4;
- public NSMInteriorFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
+ public RTreeNSMInteriorFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
super(tupleWriter, keyFieldCount);
}
@@ -75,8 +90,7 @@
bestChild = i;
}
}
- if (minEnlargedArea < tupleEntries1.getDoubleEpsilon()
- || minEnlargedArea > tupleEntries1.getDoubleEpsilon()) {
+ if (minEnlargedArea < RTreeNSMFrame.doubleEpsilon() || minEnlargedArea > RTreeNSMFrame.doubleEpsilon()) {
minEnlargedArea = Double.MAX_VALUE;
int k;
if (getTupleCount() > nearMinimumOverlapFactor) {
@@ -153,7 +167,7 @@
return false;
}
}
-
+
@Override
public int getBestChildPageId(MultiComparator cmp) {
return buf.getInt(getChildPointerOff(frameTuple, cmp));
@@ -171,7 +185,7 @@
}
return -1;
}
-
+
@Override
public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
frameTuple.setFieldCount(cmp.getKeyFieldCount());
@@ -196,7 +210,7 @@
}
@Override
- public int findTupleByPointer(ITupleReference tuple, TraverseList traverseList, int parentIndex, MultiComparator cmp) {
+ public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentIndex, MultiComparator cmp) {
frameTuple.setFieldCount(cmp.getKeyFieldCount());
for (int i = 0; i < getTupleCount(); i++) {
frameTuple.resetByTupleIndex(this, i);
@@ -264,8 +278,6 @@
return FrameOpSpaceStatus.INSUFFICIENT_SPACE;
}
-
-
@Override
public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
frameTuple.setFieldCount(cmp.getKeyFieldCount());
@@ -396,7 +408,7 @@
}
boolean tupleInserted = false;
int totalBytes = 0, numOfDeletedTuples = 0;
- for (int i = startIndex; i < endIndex; i++) {
+ for (int i = startIndex; i < endIndex; i++) {
if (tupleEntries1.get(i).getTupleIndex() != -1) {
frameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
rightFrame.insert(frameTuple, cmp, -1);
@@ -433,8 +445,8 @@
rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
- ((IRTreeFrame) rightFrame).adjustMBR(((NSMFrame) rightFrame).getTuples(), cmp);
- rTreeTupleWriterRightFrame.writeTupleFields(((NSMFrame) rightFrame).getTuples(), 0,
+ ((IRTreeFrame) rightFrame).adjustMBR(((RTreeNSMFrame) rightFrame).getTuples(), cmp);
+ rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
rTreeSplitKey.getRightPageBuffer(), 0);
rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
new file mode 100644
index 0000000..9fd5a54
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrameFactory.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
+
+public class RTreeNSMInteriorFrameFactory implements ITreeIndexFrameFactory {
+
+ private static final long serialVersionUID = 1L;
+ private ITreeIndexTupleWriterFactory tupleWriterFactory;
+ private int keyFieldCount;
+
+ public RTreeNSMInteriorFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
+ this.tupleWriterFactory = tupleWriterFactory;
+ this.keyFieldCount = keyFieldCount;
+ }
+
+ @Override
+ public IRTreeInteriorFrame createFrame() {
+ return new RTreeNSMInteriorFrame(tupleWriterFactory.createTupleWriter(), keyFieldCount);
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMLeafFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
similarity index 91%
rename from hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMLeafFrame.java
rename to hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
index 14938b3..7ebe974 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/NSMLeafFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.frames;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -14,9 +29,9 @@
import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
-public class NSMLeafFrame extends NSMFrame implements IRTreeLeafFrame {
+public class RTreeNSMLeafFrame extends RTreeNSMFrame implements IRTreeLeafFrame {
- public NSMLeafFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
+ public RTreeNSMLeafFrame(ITreeIndexTupleWriter tupleWriter, int keyFieldCount) {
super(tupleWriter, keyFieldCount);
}
@@ -199,8 +214,8 @@
rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
- ((IRTreeFrame) rightFrame).adjustMBR(((NSMFrame) rightFrame).getTuples(), cmp);
- rTreeTupleWriterRightFrame.writeTupleFields(((NSMFrame) rightFrame).getTuples(), 0,
+ ((IRTreeFrame) rightFrame).adjustMBR(((RTreeNSMFrame) rightFrame).getTuples(), cmp);
+ rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
rTreeSplitKey.getRightPageBuffer(), 0);
rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
new file mode 100644
index 0000000..7da7f26
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrameFactory.java
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.frames;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+
+public class RTreeNSMLeafFrameFactory implements ITreeIndexFrameFactory {
+
+ private static final long serialVersionUID = 1L;
+ private ITreeIndexTupleWriterFactory tupleWriterFactory;
+ private int keyFieldCount;
+
+ public RTreeNSMLeafFrameFactory(ITreeIndexTupleWriterFactory tupleWriterFactory, int keyFieldCount) {
+ this.tupleWriterFactory = tupleWriterFactory;
+ this.keyFieldCount = keyFieldCount;
+ }
+
+ @Override
+ public IRTreeLeafFrame createFrame() {
+ return new RTreeNSMLeafFrame(tupleWriterFactory.createTupleWriter(), keyFieldCount);
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/CursorInitialState.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/CursorInitialState.java
deleted file mode 100644
index 9d77288..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/CursorInitialState.java
+++ /dev/null
@@ -1,36 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-
-public class CursorInitialState implements ICursorInitialState {
-
- private PathList pathList;
- private int rootPage;
- private ICachedPage page; // for disk order scan
-
- public CursorInitialState(PathList pathList, int rootPage) {
- this.pathList = pathList;
- this.rootPage = rootPage;
- }
-
- public PathList getPathList() {
- return pathList;
- }
-
- public int getRootPage() {
- return rootPage;
- }
-
- public void setRootPage(int rootPage) {
- this.rootPage = rootPage;
- }
-
- public ICachedPage getPage() {
- return page;
- }
-
- public void setPage(ICachedPage page) {
- this.page = page;
- }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
index a04f23e..8dce1ac 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/EntriesOrder.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.impls;
public enum EntriesOrder {
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
index 80ddfab..58bad69 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
@@ -21,7 +21,7 @@
private IntArrayList pageIds;
private IntArrayList pageLsns;
private IntArrayList pageIndexes;
-
+
public PathList(int initialCapacity, int growth) {
pageIds = new IntArrayList(initialCapacity, growth);
pageLsns = new IntArrayList(initialCapacity, growth);
@@ -32,20 +32,36 @@
return pageIds.size();
}
+ public int first() {
+ return pageIds.first();
+ }
+
public void add(int pageId, int pageLsn, int pageIndex) {
pageIds.add(pageId);
pageLsns.add(pageLsn);
pageIndexes.add(pageIndex);
}
+ public int getFirstPageId() {
+ return pageIds.getFirst();
+ }
+
+ public int getFirstPageLsn() {
+ return pageLsns.getFirst();
+ }
+
+ public int getFirstPageIndex() {
+ return pageIndexes.getFirst();
+ }
+
public int getLastPageId() {
return pageIds.getLast();
}
-
+
public int getLastPageLsn() {
return pageLsns.getLast();
}
-
+
public int getLastPageIndex() {
return pageIndexes.getLast();
}
@@ -53,21 +69,35 @@
public int getPageId(int i) {
return pageIds.get(i);
}
-
+
public int getPageLsn(int i) {
return pageLsns.get(i);
}
-
+
public int getPageIndex(int i) {
return pageIndexes.get(i);
}
-
- public void removeLast() {
+
+ public void setPageLsn(int i, int pageLsn) {
+ pageLsns.set(i, pageLsn);
+ }
+
+ public void moveFirst() {
+ pageIds.moveFirst();
+ pageLsns.moveFirst();
+ pageIndexes.moveFirst();
+ }
+
+ public void moveLast() {
pageIds.removeLast();
pageLsns.removeLast();
pageIndexes.removeLast();
}
-
+
+ public boolean isLast() {
+ return pageIds.isLast();
+ }
+
public void clear() {
pageIds.clear();
pageLsns.clear();
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
index 5494217..0d24828 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
@@ -1,7 +1,21 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.impls;
import java.util.ArrayList;
-import java.util.Stack;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReadWriteLock;
@@ -19,13 +33,14 @@
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
import edu.uci.ics.hyracks.storage.am.common.api.IndexType;
import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOpContext;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.NSMFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMFrame;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
@@ -161,7 +176,7 @@
System.out.format(keyString);
if (!interiorFrame.isLeaf()) {
- ArrayList<Integer> children = ((NSMFrame) (interiorFrame)).getChildren(cmp);
+ ArrayList<Integer> children = ((RTreeNSMFrame) (interiorFrame)).getChildren(cmp);
for (int i = 0; i < children.size(); i++) {
printTree(children.get(i), node, i == children.size() - 1, leafFrame, interiorFrame, fields);
}
@@ -244,7 +259,7 @@
ICachedPage leafNode = findLeaf(ctx);
int pageId = ctx.pathList.getLastPageId();
- ctx.pathList.removeLast();
+ ctx.pathList.moveLast();
insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
while (true) {
@@ -316,7 +331,7 @@
if (pageId != rootPage) {
parentLsn = ctx.pathList.getPageLsn(ctx.pathList.size() - 2);
}
- ctx.pathList.removeLast();
+ ctx.pathList.moveLast();
continue;
}
@@ -346,7 +361,7 @@
// The page was changed while we unlocked it; thus,
// retry (re-choose best child)
- ctx.pathList.removeLast();
+ ctx.pathList.moveLast();
continue;
}
}
@@ -455,8 +470,6 @@
} else {
ctx.splitKey.setPages(pageId, rightPageId);
}
- // }
- } finally {
if (pageId == rootPage) {
rootSplits++; // debug
splitsByLevel[currentLevel]++;
@@ -495,6 +508,7 @@
ctx.splitKey.reset();
}
+ } finally {
rightNode.releaseWriteLatch();
incrementWriteLatchesReleased();
bufferCache.unpin(rightNode);
@@ -543,7 +557,7 @@
if (foundParent) {
ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, cmp);
insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(), ctx, ctx.interiorFrame.isLeaf());
- ctx.pathList.removeLast();
+ ctx.pathList.moveLast();
parentNode.releaseWriteLatch();
incrementWriteLatchesReleased();
@@ -569,7 +583,7 @@
ctx.traverseList.add(pageId, -1, parentIndex);
while (!ctx.traverseList.isLast()) {
pageId = ctx.traverseList.getFirstPageId();
- parentIndex = ctx.traverseList.getFirstParentIndex();
+ parentIndex = ctx.traverseList.getFirstPageIndex();
ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
incrementPins();
@@ -608,7 +622,7 @@
public void fillPath(RTreeOpContext ctx, int pageIndex) throws Exception {
if (pageIndex != -1) {
- fillPath(ctx, ctx.traverseList.getParentIndex(pageIndex));
+ fillPath(ctx, ctx.traverseList.getPageIndex(pageIndex));
ctx.pathList.add(ctx.traverseList.getPageId(pageIndex), ctx.traverseList.getPageLsn(pageIndex), -1);
}
}
@@ -627,7 +641,7 @@
if (tupleIndex != -1) {
int pageId = ctx.pathList.getLastPageId();
- ctx.pathList.removeLast();
+ ctx.pathList.moveLast();
deleteTuple(pageId, tupleIndex, ctx);
while (true) {
@@ -690,7 +704,7 @@
if (recomputeMBR) {
ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), tupleIndex, cmp);
- ctx.pathList.removeLast();
+ ctx.pathList.moveLast();
incrementGlobalNsn();
ctx.interiorFrame.setPageLsn(getGlobalNsn());
@@ -701,7 +715,7 @@
ctx.splitKey.setLeftPage(parentId);
}
} else {
- ctx.pathList.removeLast();
+ ctx.pathList.moveLast();
ctx.splitKey.reset();
}
@@ -730,7 +744,7 @@
int pageId = ctx.pathList.getLastPageId();
int parentLsn = ctx.pathList.getLastPageLsn();
int pageIndex = ctx.pathList.getLastPageIndex();
- ctx.pathList.removeLast();
+ ctx.pathList.moveLast();
ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
incrementPins();
node.acquireReadLatch();
@@ -738,7 +752,7 @@
ctx.interiorFrame.setPage(node);
boolean isLeaf = ctx.interiorFrame.isLeaf();
int pageLsn = ctx.interiorFrame.getPageLsn();
- int parentIndex = ctx.traverseList.getParentIndex(pageIndex);
+ int parentIndex = ctx.traverseList.getPageIndex(pageIndex);
ctx.traverseList.setPageLsn(pageIndex, pageLsn);
if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
@@ -890,7 +904,7 @@
@Override
public void diskOrderScan(ITreeIndexCursor icursor, ITreeIndexFrame leafFrame, ITreeIndexMetaDataFrame metaFrame,
IndexOpContext ictx) throws HyracksDataException {
- RTreeDiskOrderScanCursor cursor = (RTreeDiskOrderScanCursor) icursor;
+ TreeDiskOrderScanCursor cursor = (TreeDiskOrderScanCursor) icursor;
RTreeOpContext ctx = (RTreeOpContext) ictx;
ctx.reset();
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java
new file mode 100644
index 0000000..8b94a04
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeCursorInitialState.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.impls;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+public class RTreeCursorInitialState implements ICursorInitialState {
+
+ private PathList pathList;
+ private int rootPage;
+ private ICachedPage page; // for disk order scan
+
+ public RTreeCursorInitialState(PathList pathList, int rootPage) {
+ this.pathList = pathList;
+ this.rootPage = rootPage;
+ }
+
+ public PathList getPathList() {
+ return pathList;
+ }
+
+ public int getRootPage() {
+ return rootPage;
+ }
+
+ public void setRootPage(int rootPage) {
+ this.rootPage = rootPage;
+ }
+
+ public ICachedPage getPage() {
+ return page;
+ }
+
+ public void setPage(ICachedPage page) {
+ this.page = page;
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeDiskOrderScanCursor.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeDiskOrderScanCursor.java
deleted file mode 100644
index f852905..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeDiskOrderScanCursor.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
-import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
-import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
-
-public class RTreeDiskOrderScanCursor implements ITreeIndexCursor {
-
- // TODO: might want to return tuples in physical order, not logical order to
- // speed up access
-
- private int tupleIndex = 0;
- private int fileId = -1;
- int currentPageId = -1;
- int maxPageId = -1; // TODO: figure out how to scan to the end of file, this
- // is dirty and may not with concurrent updates
- private ICachedPage page = null;
- private IRTreeFrame frame = null;
- private IBufferCache bufferCache = null;
-
- private ITreeIndexTupleReference frameTuple;
-
- public RTreeDiskOrderScanCursor(IRTreeFrame frame) {
- this.frame = frame;
- this.frameTuple = frame.getTupleWriter().createTupleReference();
- }
-
- @Override
- public void close() throws Exception {
- page.releaseReadLatch();
- bufferCache.unpin(page);
- page = null;
- }
-
- @Override
- public ITreeIndexTupleReference getTuple() {
- return frameTuple;
- }
-
- @Override
- public ICachedPage getPage() {
- return page;
- }
-
- private boolean positionToNextLeaf(boolean skipCurrent) throws HyracksDataException {
- while ((frame.getLevel() != 0 || skipCurrent) && (currentPageId <= maxPageId) || (frame.getTupleCount() == 0)) {
- currentPageId++;
-
- ICachedPage nextPage = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, currentPageId), false);
- nextPage.acquireReadLatch();
-
- page.releaseReadLatch();
- bufferCache.unpin(page);
-
- page = nextPage;
- frame.setPage(page);
- tupleIndex = 0;
- skipCurrent = false;
- }
- if (currentPageId <= maxPageId)
- return true;
- else
- return false;
- }
-
- @Override
- public boolean hasNext() throws Exception {
- if (tupleIndex >= frame.getTupleCount()) {
- boolean nextLeafExists = positionToNextLeaf(true);
- if (nextLeafExists) {
- frameTuple.resetByTupleIndex(frame, tupleIndex);
- return true;
- } else {
- return false;
- }
- }
-
- frameTuple.resetByTupleIndex(frame, tupleIndex);
- return true;
- }
-
- @Override
- public void next() throws Exception {
- tupleIndex++;
- }
-
- @Override
- public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
- // in case open is called multiple times without closing
- if (page != null) {
- page.releaseReadLatch();
- bufferCache.unpin(page);
- }
-
- page = ((CursorInitialState) initialState).getPage();
- tupleIndex = 0;
- frame.setPage(page);
- SearchPredicate pred = (SearchPredicate) searchPred;
- MultiComparator cmp = pred.getCmp();
- frameTuple.setFieldCount(cmp.getFieldCount());
- boolean leafExists = positionToNextLeaf(false);
- if (!leafExists) {
- throw new HyracksDataException(
- "Failed to open disk-order scan cursor for R-tree. Traget R-tree has no leaves.");
- }
- }
-
- @Override
- public void reset() {
- tupleIndex = 0;
- currentPageId = -1;
- maxPageId = -1;
- page = null;
- }
-
- @Override
- public void setBufferCache(IBufferCache bufferCache) {
- this.bufferCache = bufferCache;
- }
-
- @Override
- public void setFileId(int fileId) {
- this.fileId = fileId;
- }
-
- public void setCurrentPageId(int currentPageId) {
- this.currentPageId = currentPageId;
- }
-
- public void setMaxPageId(int maxPageId) {
- this.maxPageId = maxPageId;
- }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
index 5ad9965..ea8af28 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.impls;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -13,13 +28,14 @@
public final IRTreeInteriorFrame interiorFrame;
public final IRTreeLeafFrame leafFrame;
public ITreeIndexCursor cursor;
- public CursorInitialState cursorInitialState;
+ public RTreeCursorInitialState cursorInitialState;
public final ITreeIndexMetaDataFrame metaFrame;
public final RTreeSplitKey splitKey;
public ITupleReference tuple;
public final PathList pathList; // used to record the pageIds and pageLsns
// of the visited pages
- public final TraverseList traverseList; // used for traversing the tree
+ public final PathList traverseList; // used for traversing the tree
+ private static final int initTraverseListSize = 100;
public RTreeOpContext(IndexOp op, IRTreeLeafFrame leafFrame, IRTreeInteriorFrame interiorFrame,
ITreeIndexMetaDataFrame metaFrame, int treeHeightHint) {
@@ -31,11 +47,11 @@
if (op != IndexOp.SEARCH && op != IndexOp.DISKORDERSCAN) {
splitKey = new RTreeSplitKey(interiorFrame.getTupleWriter().createTupleReference(), interiorFrame
.getTupleWriter().createTupleReference());
- traverseList = new TraverseList(100, 100);
+ traverseList = new PathList(initTraverseListSize, initTraverseListSize);
} else {
splitKey = null;
traverseList = null;
- cursorInitialState = new CursorInitialState(pathList, 1);
+ cursorInitialState = new RTreeCursorInitialState(pathList, 1);
}
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
index 481c034..4edcaf8 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSearchCursor.java
@@ -1,8 +1,22 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.impls;
import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
@@ -68,7 +82,7 @@
while (!pathList.isEmpty()) {
int pageId = pathList.getLastPageId();
int parentLsn = pathList.getLastPageLsn();
- pathList.removeLast();
+ pathList.moveLast();
ICachedPage node = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
pin++;
node.acquireReadLatch();
@@ -114,7 +128,7 @@
if (page == null) {
return false;
}
-
+
if (tupleIndex == leafFrame.getTupleCount()) {
if (!fetchNextLeafPage()) {
return false;
@@ -147,13 +161,13 @@
pathList.clear();
}
- pathList = ((CursorInitialState) initialState).getPathList();
- rootPage = ((CursorInitialState) initialState).getRootPage();
+ pathList = ((RTreeCursorInitialState) initialState).getPathList();
+ rootPage = ((RTreeCursorInitialState) initialState).getRootPage();
pred = (SearchPredicate) searchPred;
- cmp = pred.getCmp();
+ cmp = pred.getLowKeyComparator();
searchKey = pred.getSearchKey();
-
+
pathList.add(this.rootPage, -1, -1);
frameTuple.setFieldCount(cmp.getFieldCount());
tupleIndex = 0;
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
index b130e18..f220ea3 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeSplitKey.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.impls;
import java.nio.ByteBuffer;
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
index 4fa9179..1fb86a9 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/Rectangle.java
@@ -1,8 +1,22 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.impls;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
public class Rectangle {
private int dim;
@@ -67,18 +81,15 @@
}
return margin;
}
-
+
public double overlappedArea(ITupleReference tuple) {
double area = 1.0;
double f1, f2;
-
- for (int i = 0; i < getDim(); i++)
- {
+
+ for (int i = 0; i < getDim(); i++) {
int j = getDim() + i;
- double low = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(i),
- tuple.getFieldStart(i));
- double high = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(j),
- tuple.getFieldStart(j));
+ double low = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(i), tuple.getFieldStart(i));
+ double high = DoubleSerializerDeserializer.getDouble(tuple.getFieldData(j), tuple.getFieldStart(j));
if (getLow(i) > high || getHigh(i) < low) {
return 0.0;
}
@@ -88,13 +99,12 @@
}
return area;
}
-
+
public double overlappedArea(Rectangle rec) {
double area = 1.0;
double f1, f2;
-
- for (int i = 0; i < getDim(); i++)
- {
+
+ for (int i = 0; i < getDim(); i++) {
if (getLow(i) > rec.getHigh(i) || getHigh(i) < rec.getLow(i)) {
return 0.0;
}
@@ -105,7 +115,7 @@
}
return area;
}
-
+
public double area(ITupleReference tuple) {
double area = 1.0;
for (int i = 0; i < getDim(); i++) {
@@ -115,7 +125,7 @@
}
return area;
}
-
+
public double area() {
double area = 1.0;
for (int i = 0; i < getDim(); i++) {
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java
index 4ad6223..4573b7f 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SearchPredicate.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.impls;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -24,7 +39,11 @@
this.searchKey = searchKey;
}
- public MultiComparator getCmp() {
+ public MultiComparator getLowKeyComparator() {
+ return cmp;
+ }
+
+ public MultiComparator getHighKeyComparator() {
return cmp;
}
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SpatialUtils.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SpatialUtils.java
deleted file mode 100644
index 791999f..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/SpatialUtils.java
+++ /dev/null
@@ -1,18 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-
-public class SpatialUtils {
- private double doubleEpsilon;
-
- public SpatialUtils() {
- double temp = 0.5;
- while (1 + temp > 1) {
- temp /= 2;
- }
- this.doubleEpsilon = temp;
- }
-
- public double getDoubleEpsilon() {
- return doubleEpsilon;
- }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TraverseList.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TraverseList.java
deleted file mode 100644
index 6a4d45c..0000000
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TraverseList.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.rtree.impls;
-
-import edu.uci.ics.hyracks.storage.am.common.ophelpers.IntArrayList;
-
-public class TraverseList {
- private IntArrayList pageIds;
- private IntArrayList pageLsns;
- private IntArrayList parentIndexes;
-
- public TraverseList(int initialCapacity, int growth) {
- pageIds = new IntArrayList(initialCapacity, growth);
- pageLsns = new IntArrayList(initialCapacity, growth);
- parentIndexes = new IntArrayList(initialCapacity, growth);
- }
-
- public int size() {
- return pageIds.size();
- }
-
- public int first() {
- return pageIds.first();
- }
-
- public void add(int pageId, int pageLsn, int parentIndex) {
- pageIds.add(pageId);
- pageLsns.add(pageLsn);
- parentIndexes.add(parentIndex);
- }
-
- public int getFirstPageId() {
- return pageIds.getFirst();
- }
-
- public int getFirstPageLsn() {
- return pageLsns.getFirst();
- }
-
- public int getFirstParentIndex() {
- return parentIndexes.getFirst();
- }
-
- public int getLastPageId() {
- return pageIds.getLast();
- }
-
- public int getLastPageLsn() {
- return pageLsns.getLast();
- }
-
- public int getLastParentIndex() {
- return parentIndexes.getLast();
- }
-
- public int getPageId(int i) {
- return pageIds.get(i);
- }
-
- public int getPageLsn(int i) {
- return pageLsns.get(i);
- }
-
- public int getParentIndex(int i) {
- return parentIndexes.get(i);
- }
-
- public void setPageLsn(int i, int pageLsn) {
- pageLsns.set(i, pageLsn);
- }
-
- public void moveFirst() {
- pageIds.moveFirst();
- pageLsns.moveFirst();
- parentIndexes.moveFirst();
- }
-
- public void moveLast() {
- pageIds.removeLast();
- pageLsns.removeLast();
- parentIndexes.removeLast();
- }
-
- public boolean isLast() {
- return pageIds.isLast();
- }
-
- public void clear() {
- pageIds.clear();
- pageLsns.clear();
- parentIndexes.clear();
- }
-
- public boolean isEmpty() {
- return pageIds.isEmpty();
- }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java
index 326fc4c..0cb3de8 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntry.java
@@ -1,41 +1,51 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.impls;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMFrame;
-public class TupleEntry implements Comparable <TupleEntry> {
+public class TupleEntry implements Comparable<TupleEntry> {
private int tupleIndex;
private double value;
- private final double doubleEpsilon;
-
- public TupleEntry(double doubleEpsilon) {
- this.doubleEpsilon = doubleEpsilon;
+
+ public TupleEntry() {
}
-
+
public int getTupleIndex() {
return tupleIndex;
}
-
+
public void setTupleIndex(int tupleIndex) {
this.tupleIndex = tupleIndex;
}
-
+
public double getValue() {
return value;
}
-
+
public void setValue(double value) {
this.value = value;
}
-
- public double getDoubleEpsilon() {
- return doubleEpsilon;
- }
public int compareTo(TupleEntry tupleEntry) {
double cmp = this.getValue() - tupleEntry.getValue();
- if (cmp > getDoubleEpsilon())
+ if (cmp > RTreeNSMFrame.doubleEpsilon())
return 1;
cmp = tupleEntry.getValue() - this.getValue();
- if (cmp > getDoubleEpsilon())
+ if (cmp > RTreeNSMFrame.doubleEpsilon())
return -1;
return 0;
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java
index 2eca75b..8be8251 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/TupleEntryArrayList.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.impls;
import java.util.Arrays;
@@ -7,19 +22,13 @@
private TupleEntry[] data;
private int size;
private final int growth;
- private final double doubleEpsilon;
- public TupleEntryArrayList(int initialCapacity, int growth, SpatialUtils spatialUtils) {
- doubleEpsilon = spatialUtils.getDoubleEpsilon();
+ public TupleEntryArrayList(int initialCapacity, int growth) {
data = new TupleEntry[initialCapacity];
size = 0;
this.growth = growth;
}
- public double getDoubleEpsilon() {
- return doubleEpsilon;
- }
-
public int size() {
return size;
}
@@ -31,7 +40,7 @@
data = newData;
}
if (data[size] == null) {
- data[size] = new TupleEntry(doubleEpsilon);
+ data[size] = new TupleEntry();
}
data[size].setTupleIndex(tupleIndex);
data[size].setValue(value);
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
index 1a9c274..d2b1c53 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.impls;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
@@ -6,7 +21,7 @@
import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleMode;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.FindTupleNoExactMatchPolicy;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.NSMFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMFrame;
public class UnorderedSlotManager extends AbstractSlotManager {
@Override
@@ -83,12 +98,12 @@
while (slotOff >= getSlotEndOff()) {
if (frame.getBuffer().getInt(slotOff) == -1) {
while (frame.getBuffer().getInt(getSlotEndOff()) == -1) {
- ((NSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
+ ((RTreeNSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
}
if (slotOff > getSlotEndOff()) {
System.arraycopy(frame.getBuffer().array(), getSlotEndOff(), frame.getBuffer().array(), slotOff,
slotSize);
- ((NSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
+ ((RTreeNSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
} else {
break;
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
index b2cf5e9..96820c9 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriter.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.tuples;
import java.nio.ByteBuffer;
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
index 18fbd71..e2e99c7 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tuples/RTreeTypeAwareTupleWriterFactory.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree.tuples;
import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
index ace0bb8..4c6bb8c 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
@@ -45,9 +45,8 @@
import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
-import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeDiskOrderScanCursor;
-import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
@@ -58,6 +57,7 @@
import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.common.tuples.SimpleTupleWriterFactory;
@@ -69,1275 +69,1156 @@
public class BTreeTest extends AbstractBTreeTest {
- private static final int PAGE_SIZE = 256;
- private static final int NUM_PAGES = 10;
- private static final int MAX_OPEN_FILES = 10;
- private static final int HYRACKS_FRAME_SIZE = 128;
- private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-
- // FIXED-LENGTH KEY TEST
- // create a B-tree with one fixed-length "key" field and one fixed-length
- // "value" field
- // fill B-tree with random values using insertions (not bulk load)
- // perform ordered scan and range search
- @Test
- public void test01() throws Exception {
+ private static final int PAGE_SIZE = 256;
+ private static final int NUM_PAGES = 10;
+ private static final int MAX_OPEN_FILES = 10;
+ private static final int HYRACKS_FRAME_SIZE = 128;
+ private IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
- print("FIXED-LENGTH KEY TEST\n");
+ // FIXED-LENGTH KEY TEST
+ // create a B-tree with one fixed-length "key" field and one fixed-length
+ // "value" field
+ // fill B-tree with random values using insertions (not bulk load)
+ // perform ordered scan and range search
+ @Test
+ public void test01() throws Exception {
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ print("FIXED-LENGTH KEY TEST\n");
- // declare fields
- int fieldCount = 2;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- // declare keys
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ // declare fields
+ int fieldCount = 2;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
- typeTraits);
- ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
- IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame)interiorFrameFactory.createFrame();
- ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
- BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- Random rnd = new Random();
- rnd.setSeed(50);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- long start = System.currentTimeMillis();
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- print("INSERTING INTO TREE\n");
+ Random rnd = new Random();
+ rnd.setSeed(50);
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ long start = System.currentTimeMillis();
- ISerializerDeserializer[] recDescSers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ print("INSERTING INTO TREE\n");
- BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT,
- leafFrame, interiorFrame, metaFrame);
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- // 10000
- for (int i = 0; i < 10000; i++) {
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- int f0 = rnd.nextInt() % 10000;
- int f1 = 5;
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
+ // 10000
+ for (int i = 0; i < 10000; i++) {
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
- .getSize());
+ int f0 = rnd.nextInt() % 10000;
+ int f1 = 5;
- tuple.reset(accessor, 0);
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
- // System.out.println(tuple.getFieldCount() + " " +
- // tuple.getFieldLength(0) + " " + tuple.getFieldLength(1));
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- if (i % 1000 == 0) {
- long end = System.currentTimeMillis();
- print("INSERTING " + i + " : " + f0 + " " + f1 + " "
- + (end - start) + "\n");
- }
+ tuple.reset(accessor, 0);
- try {
- btree.insert(tuple, insertOpCtx);
- } catch (TreeIndexException e) {
- } catch (Exception e) {
- e.printStackTrace();
- }
+ // System.out.println(tuple.getFieldCount() + " " +
+ // tuple.getFieldLength(0) + " " + tuple.getFieldLength(1));
- // btree.printTree(leafFrame, interiorFrame);
- // System.out.println();
- }
- // btree.printTree(leafFrame, interiorFrame);
- // System.out.println();
-
- int maxPage = btree.getFreePageManager().getMaxPage(metaFrame);
- System.out.println("MAXPAGE: " + maxPage);
+ if (i % 1000 == 0) {
+ long end = System.currentTimeMillis();
+ print("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start) + "\n");
+ }
- String stats = btree.printStats();
- print(stats);
+ try {
+ btree.insert(tuple, insertOpCtx);
+ } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
- long end = System.currentTimeMillis();
- long duration = end - start;
- print("DURATION: " + duration + "\n");
+ // btree.printTree(leafFrame, interiorFrame);
+ // System.out.println();
+ }
+ // btree.printTree(leafFrame, interiorFrame);
+ // System.out.println();
- // ordered scan
+ int maxPage = btree.getFreePageManager().getMaxPage(metaFrame);
+ System.out.println("MAXPAGE: " + maxPage);
- print("ORDERED SCAN:\n");
- ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
- RangePredicate nullPred = new RangePredicate(true, null, null, true,
- true, null, null);
- BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH,
- leafFrame, interiorFrame, null);
- btree.search(scanCursor, nullPred, searchOpCtx);
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursor.close();
- }
+ String stats = btree.printStats();
+ print(stats);
- // disk-order scan
- print("DISK-ORDER SCAN:\n");
- BTreeDiskOrderScanCursor diskOrderCursor = new BTreeDiskOrderScanCursor(leafFrame);
- BTreeOpContext diskOrderScanOpCtx = btree.createOpContext(IndexOp.DISKORDERSCAN,
- leafFrame, null, null);
- btree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
- try {
- while (diskOrderCursor.hasNext()) {
- diskOrderCursor.next();
- ITupleReference frameTuple = diskOrderCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- diskOrderCursor.close();
- }
+ long end = System.currentTimeMillis();
+ long duration = end - start;
+ print("DURATION: " + duration + "\n");
- // range search in [-1000, 1000]
- print("RANGE SEARCH:\n");
+ // ordered scan
- ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
+ print("ORDERED SCAN:\n");
+ ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(scanCursor, nullPred, searchOpCtx);
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
+ // disk-order scan
+ print("DISK-ORDER SCAN:\n");
+ TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
+ BTreeOpContext diskOrderScanOpCtx = btree.createOpContext(IndexOp.DISKORDERSCAN, leafFrame, null, null);
+ btree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
+ try {
+ while (diskOrderCursor.hasNext()) {
+ diskOrderCursor.next();
+ ITupleReference frameTuple = diskOrderCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ diskOrderCursor.close();
+ }
- ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx
- .getFrameSize(), keyDesc);
- keyAccessor.reset(frame);
+ // range search in [-1000, 1000]
+ print("RANGE SEARCH:\n");
- appender.reset(frame, true);
+ ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(-1000, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(1000, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ keyAccessor.reset(frame);
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
+ appender.reset(frame, true);
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(-1000, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(1000, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
- true, true, searchCmp, searchCmp);
- btree.search(rangeCursor, rangePred, searchOpCtx);
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
+ IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
- print("\n");
- }
-
- // COMPOSITE KEY TEST (NON-UNIQUE B-TREE)
- // create a B-tree with one two fixed-length "key" fields and one
- // fixed-length "value" field
- // fill B-tree with random values using insertions (not bulk load)
- // perform ordered scan and range search
- @Test
- public void test02() throws Exception {
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
- print("COMPOSITE KEY TEST\n");
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
- // declare fields
- int fieldCount = 3;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
- typeTraits[2] = new TypeTrait(4);
+ print("\n");
+ }
- // declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ // COMPOSITE KEY TEST (NON-UNIQUE B-TREE)
+ // create a B-tree with one two fixed-length "key" fields and one
+ // fixed-length "value" field
+ // fill B-tree with random values using insertions (not bulk load)
+ // perform ordered scan and range search
+ @Test
+ public void test02() throws Exception {
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ print("COMPOSITE KEY TEST\n");
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
- typeTraits);
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
- ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
- IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame)interiorFrameFactory.createFrame();
- ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+ // declare fields
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
+ typeTraits[2] = new TypeTrait(4);
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
- BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ // declare keys
+ int keyFieldCount = 2;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- Random rnd = new Random();
- rnd.setSeed(50);
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- long start = System.currentTimeMillis();
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- print("INSERTING INTO TREE\n");
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- ISerializerDeserializer[] recDescSers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT,
- leafFrame, interiorFrame, metaFrame);
+ Random rnd = new Random();
+ rnd.setSeed(50);
- for (int i = 0; i < 10000; i++) {
- int f0 = rnd.nextInt() % 2000;
- int f1 = rnd.nextInt() % 1000;
- int f2 = 5;
+ long start = System.currentTimeMillis();
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
- tb.addFieldEndOffset();
+ print("INSERTING INTO TREE\n");
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
- .getSize());
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- tuple.reset(accessor, 0);
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- if (i % 1000 == 0) {
- print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
- }
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
- try {
- btree.insert(tuple, insertOpCtx);
- } catch (Exception e) {
- }
- }
- // btree.printTree(leafFrame, interiorFrame);
+ for (int i = 0; i < 10000; i++) {
+ int f0 = rnd.nextInt() % 2000;
+ int f1 = rnd.nextInt() % 1000;
+ int f2 = 5;
- long end = System.currentTimeMillis();
- long duration = end - start;
- print("DURATION: " + duration + "\n");
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+ tb.addFieldEndOffset();
- // try a simple index scan
- print("ORDERED SCAN:\n");
- ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
- RangePredicate nullPred = new RangePredicate(true, null, null, true,
- true, null, null);
- BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH,
- leafFrame, interiorFrame, null);
- btree.search(scanCursor, nullPred, searchOpCtx);
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursor.close();
- }
+ tuple.reset(accessor, 0);
- // range search in [(-3),(3)]
- print("RANGE SEARCH:\n");
- ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
+ }
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
+ try {
+ btree.insert(tuple, insertOpCtx);
+ } catch (Exception e) {
+ }
+ }
+ // btree.printTree(leafFrame, interiorFrame);
- ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx
- .getFrameSize(), keyDesc);
- keyAccessor.reset(frame);
+ long end = System.currentTimeMillis();
+ long duration = end - start;
+ print("DURATION: " + duration + "\n");
- appender.reset(frame, true);
+ // try a simple index scan
+ print("ORDERED SCAN:\n");
+ ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(scanCursor, nullPred, searchOpCtx);
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(-3, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(3, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ // range search in [(-3),(3)]
+ print("RANGE SEARCH:\n");
+ ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ keyAccessor.reset(frame);
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps); // use
- // only
- // a
- // single
- // comparator
- // for
- // searching
+ appender.reset(frame, true);
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
- true, true, searchCmp, searchCmp);
- btree.search(rangeCursor, rangePred, searchOpCtx);
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(-3, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(3, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
- print("\n");
- }
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
- // VARIABLE-LENGTH TEST
- // create a B-tree with one variable-length "key" field and one
- // variable-length "value" field
- // fill B-tree with random values using insertions (not bulk load)
- // perform ordered scan and range search
- @Test
- public void test03() throws Exception {
+ IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps); // use
+ // only
+ // a
+ // single
+ // comparator
+ // for
+ // searching
- print("VARIABLE-LENGTH KEY TEST\n");
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
- // declare fields
- int fieldCount = 2;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
- // declare keys
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ print("\n");
+ }
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ // VARIABLE-LENGTH TEST
+ // create a B-tree with one variable-length "key" field and one
+ // variable-length "value" field
+ // fill B-tree with random values using insertions (not bulk load)
+ // perform ordered scan and range search
+ @Test
+ public void test03() throws Exception {
- SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
- // TypeAwareTupleWriterFactory tupleWriterFactory = new
- // TypeAwareTupleWriterFactory(typeTraits);
- ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ print("VARIABLE-LENGTH KEY TEST\n");
- IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
- IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame)interiorFrameFactory.createFrame();
- ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
- BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ // declare fields
+ int fieldCount = 2;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- Random rnd = new Random();
- rnd.setSeed(50);
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- ISerializerDeserializer[] recDescSers = {
- UTF8StringSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
+ // TypeAwareTupleWriterFactory tupleWriterFactory = new
+ // TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT,
- leafFrame, interiorFrame, metaFrame);
- int maxLength = 10; // max string length to be generated
- for (int i = 0; i < 10000; i++) {
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1,
- rnd);
- String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1,
- rnd);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
- .getSize());
+ Random rnd = new Random();
+ rnd.setSeed(50);
- tuple.reset(accessor, 0);
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- if (i % 1000 == 0) {
- // print("INSERTING " + i + ": " + cmp.printRecord(record, 0) +
- // "\n");
- print("INSERTING " + i + "\n");
- }
+ ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- try {
- btree.insert(tuple, insertOpCtx);
- } catch (Exception e) {
- // e.printStackTrace();
- }
- }
- // btree.printTree();
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+ int maxLength = 10; // max string length to be generated
+ for (int i = 0; i < 10000; i++) {
- System.out.println("DONE INSERTING");
+ String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- // ordered scan
- print("ORDERED SCAN:\n");
- ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
- RangePredicate nullPred = new RangePredicate(true, null, null, true,
- true, null, null);
- BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH,
- leafFrame, interiorFrame, null);
- btree.search(scanCursor, nullPred, searchOpCtx);
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursor.close();
- }
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- // range search in ["cbf", cc7"]
- print("RANGE SEARCH:\n");
+ tuple.reset(accessor, 0);
- ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
+ if (i % 1000 == 0) {
+ // print("INSERTING " + i + ": " + cmp.printRecord(record, 0) +
+ // "\n");
+ print("INSERTING " + i + "\n");
+ }
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
+ try {
+ btree.insert(tuple, insertOpCtx);
+ } catch (Exception e) {
+ // e.printStackTrace();
+ }
+ }
+ // btree.printTree();
- ISerializerDeserializer[] keyDescSers = { UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx
- .getFrameSize(), keyDesc);
- keyAccessor.reset(frame);
+ System.out.println("DONE INSERTING");
- appender.reset(frame, true);
+ // ordered scan
+ print("ORDERED SCAN:\n");
+ ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(scanCursor, nullPred, searchOpCtx);
- // build and append low key
- ktb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize("cbf", kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
- // build and append high key
- ktb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize("cc7", kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ // range search in ["cbf", cc7"]
+ print("RANGE SEARCH:\n");
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
+ ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
+
+ ISerializerDeserializer[] keyDescSers = { UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ keyAccessor.reset(frame);
+
+ appender.reset(frame, true);
+
+ // build and append low key
+ ktb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("cbf", kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // build and append high key
+ ktb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("cc7", kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
+
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
+
+ IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ searchCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
+
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
- true, true, searchCmp, searchCmp);
- btree.search(rangeCursor, rangePred, searchOpCtx);
+ print("\n");
+ }
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
+ // DELETION TEST
+ // create a B-tree with one variable-length "key" field and one
+ // variable-length "value" field
+ // fill B-tree with random values using insertions, then delete entries
+ // one-by-one
+ // repeat procedure a few times on same B-tree
+ @Test
+ public void test04() throws Exception {
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
+ print("DELETION TEST\n");
- print("\n");
- }
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- // DELETION TEST
- // create a B-tree with one variable-length "key" field and one
- // variable-length "value" field
- // fill B-tree with random values using insertions, then delete entries
- // one-by-one
- // repeat procedure a few times on same B-tree
- @Test
- public void test04() throws Exception {
+ // declare fields
+ int fieldCount = 2;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- print("DELETION TEST\n");
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- // declare fields
- int fieldCount = 2;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
- typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- // declare keys
- int keyFieldCount = 1;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
- typeTraits);
- ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
- IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame)interiorFrameFactory.createFrame();
- ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+ Random rnd = new Random();
+ rnd.setSeed(50);
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
- BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- Random rnd = new Random();
- rnd.setSeed(50);
+ ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+ BTreeOpContext deleteOpCtx = btree.createOpContext(IndexOp.DELETE, leafFrame, interiorFrame, metaFrame);
- ISerializerDeserializer[] recDescSers = {
- UTF8StringSerializerDeserializer.INSTANCE,
- UTF8StringSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ int runs = 3;
+ for (int run = 0; run < runs; run++) {
- BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT,
- leafFrame, interiorFrame, metaFrame);
- BTreeOpContext deleteOpCtx = btree.createOpContext(IndexOp.DELETE,
- leafFrame, interiorFrame, metaFrame);
+ print("DELETION TEST RUN: " + (run + 1) + "/" + runs + "\n");
- int runs = 3;
- for (int run = 0; run < runs; run++) {
+ print("INSERTING INTO BTREE\n");
+ int maxLength = 10;
+ int ins = 10000;
+ String[] f0s = new String[ins];
+ String[] f1s = new String[ins];
+ int insDone = 0;
+ int[] insDoneCmp = new int[ins];
+ for (int i = 0; i < ins; i++) {
+ String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+ String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
- print("DELETION TEST RUN: " + (run + 1) + "/" + runs + "\n");
+ f0s[i] = f0;
+ f1s[i] = f1;
- print("INSERTING INTO BTREE\n");
- int maxLength = 10;
- int ins = 10000;
- String[] f0s = new String[ins];
- String[] f1s = new String[ins];
- int insDone = 0;
- int[] insDoneCmp = new int[ins];
- for (int i = 0; i < ins; i++) {
- String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength
- + 1, rnd);
- String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength
- + 1, rnd);
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
- f0s[i] = f0;
- f1s[i] = f1;
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
+ tuple.reset(accessor, 0);
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
- tb.getSize());
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + "\n");
+ // print("INSERTING " + i + ": " + cmp.printRecord(record,
+ // 0) + "\n");
+ }
- tuple.reset(accessor, 0);
+ try {
+ btree.insert(tuple, insertOpCtx);
+ insDone++;
+ } catch (TreeIndexException e) {
+ // e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
- if (i % 1000 == 0) {
- print("INSERTING " + i + "\n");
- // print("INSERTING " + i + ": " + cmp.printRecord(record,
- // 0) + "\n");
- }
+ insDoneCmp[i] = insDone;
+ }
+ // btree.printTree();
+ // btree.printStats();
- try {
- btree.insert(tuple, insertOpCtx);
- insDone++;
- } catch (TreeIndexException e) {
- // e.printStackTrace();
- } catch (Exception e) {
- e.printStackTrace();
- }
+ print("DELETING FROM BTREE\n");
+ int delDone = 0;
+ for (int i = 0; i < ins; i++) {
- insDoneCmp[i] = insDone;
- }
- // btree.printTree();
- // btree.printStats();
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f0s[i], dos);
+ tb.addFieldEndOffset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize(f1s[i], dos);
+ tb.addFieldEndOffset();
- print("DELETING FROM BTREE\n");
- int delDone = 0;
- for (int i = 0; i < ins; i++) {
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- tb.reset();
- UTF8StringSerializerDeserializer.INSTANCE
- .serialize(f0s[i], dos);
- tb.addFieldEndOffset();
- UTF8StringSerializerDeserializer.INSTANCE
- .serialize(f1s[i], dos);
- tb.addFieldEndOffset();
+ tuple.reset(accessor, 0);
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
- tb.getSize());
+ if (i % 1000 == 0) {
+ // print("DELETING " + i + ": " +
+ // cmp.printRecord(records[i], 0) + "\n");
+ print("DELETING " + i + "\n");
+ }
- tuple.reset(accessor, 0);
+ try {
+ btree.delete(tuple, deleteOpCtx);
+ delDone++;
+ } catch (TreeIndexException e) {
+ // e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
- if (i % 1000 == 0) {
- // print("DELETING " + i + ": " +
- // cmp.printRecord(records[i], 0) + "\n");
- print("DELETING " + i + "\n");
- }
+ if (insDoneCmp[i] != delDone) {
+ print("INCONSISTENT STATE, ERROR IN DELETION TEST\n");
+ print("INSDONECMP: " + insDoneCmp[i] + " " + delDone + "\n");
+ break;
+ }
+ // btree.printTree();
+ }
+ // btree.printTree(leafFrame, interiorFrame);
- try {
- btree.delete(tuple, deleteOpCtx);
- delDone++;
- } catch (TreeIndexException e) {
- // e.printStackTrace();
- } catch (Exception e) {
- e.printStackTrace();
- }
+ if (insDone != delDone) {
+ print("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
+ break;
+ }
+ }
- if (insDoneCmp[i] != delDone) {
- print("INCONSISTENT STATE, ERROR IN DELETION TEST\n");
- print("INSDONECMP: " + insDoneCmp[i] + " " + delDone + "\n");
- break;
- }
- // btree.printTree();
- }
- // btree.printTree(leafFrame, interiorFrame);
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
- if (insDone != delDone) {
- print("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
- break;
- }
- }
+ print("\n");
+ }
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
+ // BULK LOAD TEST
+ // insert 100,000 records in bulk
+ // B-tree has a composite key to "simulate" non-unique index creation
+ // do range search
+ @Test
+ public void test05() throws Exception {
- print("\n");
- }
+ print("BULK LOAD TEST\n");
- // BULK LOAD TEST
- // insert 100,000 records in bulk
- // B-tree has a composite key to "simulate" non-unique index creation
- // do range search
- @Test
- public void test05() throws Exception {
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- print("BULK LOAD TEST\n");
+ // declare fields
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
+ typeTraits[2] = new TypeTrait(4);
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ // declare keys
+ int keyFieldCount = 2;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
- // declare fields
- int fieldCount = 3;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
- typeTraits[2] = new TypeTrait(4);
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- // declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
+ ITreeIndexFrame interiorFrame = interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
- typeTraits);
- ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- ITreeIndexFrame leafFrame = leafFrameFactory.createFrame();
- ITreeIndexFrame interiorFrame = interiorFrameFactory.createFrame();
- ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
- BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ Random rnd = new Random();
+ rnd.setSeed(50);
- Random rnd = new Random();
- rnd.setSeed(50);
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- ISerializerDeserializer[] recDescSers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ IIndexBulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f, leafFrame, interiorFrame, metaFrame);
- IIndexBulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f,
- leafFrame, interiorFrame, metaFrame);
+ // generate sorted records
+ int ins = 100000;
+ print("BULK LOADING " + ins + " RECORDS\n");
+ long start = System.currentTimeMillis();
+ for (int i = 0; i < ins; i++) {
- // generate sorted records
- int ins = 100000;
- print("BULK LOADING " + ins + " RECORDS\n");
- long start = System.currentTimeMillis();
- for (int i = 0; i < ins; i++) {
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(5, dos);
+ tb.addFieldEndOffset();
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(5, dos);
- tb.addFieldEndOffset();
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
- .getSize());
+ tuple.reset(accessor, 0);
- tuple.reset(accessor, 0);
+ btree.bulkLoadAddTuple(bulkLoadCtx, tuple);
+ }
- btree.bulkLoadAddTuple(bulkLoadCtx, tuple);
- }
+ btree.endBulkLoad(bulkLoadCtx);
- btree.endBulkLoad(bulkLoadCtx);
+ // btree.printTree(leafFrame, interiorFrame);
- // btree.printTree(leafFrame, interiorFrame);
+ long end = System.currentTimeMillis();
+ long duration = end - start;
+ print("DURATION: " + duration + "\n");
- long end = System.currentTimeMillis();
- long duration = end - start;
- print("DURATION: " + duration + "\n");
+ // range search
+ print("RANGE SEARCH:\n");
+ ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame) leafFrame);
- // range search
- print("RANGE SEARCH:\n");
- ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor((IBTreeLeafFrame)leafFrame);
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
+ DataOutput kdos = ktb.getDataOutput();
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
- DataOutput kdos = ktb.getDataOutput();
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ keyAccessor.reset(frame);
- ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx
- .getFrameSize(), keyDesc);
- keyAccessor.reset(frame);
+ appender.reset(frame, true);
- appender.reset(frame, true);
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(44444, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(44444, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(44500, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(44500, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
+ IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
- IBinaryComparator[] searchCmps = new IBinaryComparator[1];
- searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+ // TODO: check when searching backwards
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
- // TODO: check when searching backwards
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
- true, true, searchCmp, searchCmp);
- BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH,
- leafFrame, interiorFrame, null);
- btree.search(rangeCursor, rangePred, searchOpCtx);
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
+ print("\n");
+ }
- print("\n");
- }
+ // TIME-INTERVAL INTERSECTION DEMO FOR EVENT PEOPLE
+ // demo for Arjun to show easy support of intersection queries on
+ // time-intervals
+ @Test
+ public void test06() throws Exception {
- // TIME-INTERVAL INTERSECTION DEMO FOR EVENT PEOPLE
- // demo for Arjun to show easy support of intersection queries on
- // time-intervals
- @Test
- public void test06() throws Exception {
+ print("TIME-INTERVAL INTERSECTION DEMO\n");
- print("TIME-INTERVAL INTERSECTION DEMO\n");
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
- TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
- IBufferCache bufferCache = TestStorageManagerComponentHolder
- .getBufferCache(ctx);
- IFileMapProvider fmp = TestStorageManagerComponentHolder
- .getFileMapProvider(ctx);
- FileReference file = new FileReference(new File(fileName));
- bufferCache.createFile(file);
- int fileId = fmp.lookupFileId(file);
- bufferCache.openFile(fileId);
+ // declare fields
+ int fieldCount = 3;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(4);
+ typeTraits[1] = new TypeTrait(4);
+ typeTraits[2] = new TypeTrait(4);
- // declare fields
- int fieldCount = 3;
- ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
- typeTraits[0] = new TypeTrait(4);
- typeTraits[1] = new TypeTrait(4);
- typeTraits[2] = new TypeTrait(4);
+ // declare keys
+ int keyFieldCount = 2;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
- // declare keys
- int keyFieldCount = 2;
- IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
- cmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- cmps[1] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+ // SimpleTupleWriterFactory tupleWriterFactory = new
+ // SimpleTupleWriterFactory();
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
- // SimpleTupleWriterFactory tupleWriterFactory = new
- // SimpleTupleWriterFactory();
- TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(
- typeTraits);
- ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(
- tupleWriterFactory);
- ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(
- tupleWriterFactory);
- ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
- IBTreeLeafFrame leafFrame = (IBTreeLeafFrame)leafFrameFactory.createFrame();
- IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame)interiorFrameFactory.createFrame();
- ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
- IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
-
- BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory,
- leafFrameFactory, cmp);
- btree.create(fileId, leafFrame, metaFrame);
- btree.open(fileId);
+ BTree btree = new BTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ btree.create(fileId, leafFrame, metaFrame);
+ btree.open(fileId);
- Random rnd = new Random();
- rnd.setSeed(50);
+ Random rnd = new Random();
+ rnd.setSeed(50);
- ByteBuffer frame = ctx.allocateFrame();
- FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
- ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
- DataOutput dos = tb.getDataOutput();
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
- ISerializerDeserializer[] recDescSers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
- IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx
- .getFrameSize(), recDesc);
- accessor.reset(frame);
- FrameTupleReference tuple = new FrameTupleReference();
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+ FrameTupleReference tuple = new FrameTupleReference();
- long start = System.currentTimeMillis();
+ long start = System.currentTimeMillis();
- int intervalCount = 10;
- int[][] intervals = new int[intervalCount][2];
+ int intervalCount = 10;
+ int[][] intervals = new int[intervalCount][2];
- intervals[0][0] = 10;
- intervals[0][1] = 20;
+ intervals[0][0] = 10;
+ intervals[0][1] = 20;
- intervals[1][0] = 11;
- intervals[1][1] = 20;
+ intervals[1][0] = 11;
+ intervals[1][1] = 20;
- intervals[2][0] = 12;
- intervals[2][1] = 20;
+ intervals[2][0] = 12;
+ intervals[2][1] = 20;
- intervals[3][0] = 13;
- intervals[3][1] = 20;
+ intervals[3][0] = 13;
+ intervals[3][1] = 20;
- intervals[4][0] = 14;
- intervals[4][1] = 20;
+ intervals[4][0] = 14;
+ intervals[4][1] = 20;
- intervals[5][0] = 20;
- intervals[5][1] = 30;
+ intervals[5][0] = 20;
+ intervals[5][1] = 30;
- intervals[6][0] = 20;
- intervals[6][1] = 31;
+ intervals[6][0] = 20;
+ intervals[6][1] = 31;
- intervals[7][0] = 20;
- intervals[7][1] = 32;
+ intervals[7][0] = 20;
+ intervals[7][1] = 32;
- intervals[8][0] = 20;
- intervals[8][1] = 33;
+ intervals[8][0] = 20;
+ intervals[8][1] = 33;
- intervals[9][0] = 20;
- intervals[9][1] = 35;
+ intervals[9][0] = 20;
+ intervals[9][1] = 35;
- BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT,
- leafFrame, interiorFrame, metaFrame);
+ BTreeOpContext insertOpCtx = btree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
- // int exceptionCount = 0;
- for (int i = 0; i < intervalCount; i++) {
- int f0 = intervals[i][0];
- int f1 = intervals[i][1];
- int f2 = rnd.nextInt() % 100;
+ // int exceptionCount = 0;
+ for (int i = 0; i < intervalCount; i++) {
+ int f0 = intervals[i][0];
+ int f1 = intervals[i][1];
+ int f2 = rnd.nextInt() % 100;
- tb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
- tb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
- tb.addFieldEndOffset();
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+ tb.addFieldEndOffset();
- appender.reset(frame, true);
- appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb
- .getSize());
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
- tuple.reset(accessor, 0);
+ tuple.reset(accessor, 0);
- // print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
- print("INSERTING " + i + "\n");
+ // print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
+ print("INSERTING " + i + "\n");
- try {
- btree.insert(tuple, insertOpCtx);
- } catch (Exception e) {
- // e.printStackTrace();
- }
- }
- // btree.printTree(leafFrame, interiorFrame);
- // btree.printStats();
+ try {
+ btree.insert(tuple, insertOpCtx);
+ } catch (Exception e) {
+ // e.printStackTrace();
+ }
+ }
+ // btree.printTree(leafFrame, interiorFrame);
+ // btree.printStats();
- long end = System.currentTimeMillis();
- long duration = end - start;
- print("DURATION: " + duration + "\n");
+ long end = System.currentTimeMillis();
+ long duration = end - start;
+ print("DURATION: " + duration + "\n");
- // try a simple index scan
+ // try a simple index scan
- print("ORDERED SCAN:\n");
- ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
- RangePredicate nullPred = new RangePredicate(true, null, null, true,
- true, null, null);
- BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH,
- leafFrame, interiorFrame, null);
- btree.search(scanCursor, nullPred, searchOpCtx);
+ print("ORDERED SCAN:\n");
+ ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(leafFrame);
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ BTreeOpContext searchOpCtx = btree.createOpContext(IndexOp.SEARCH, leafFrame, interiorFrame, null);
+ btree.search(scanCursor, nullPred, searchOpCtx);
- try {
- while (scanCursor.hasNext()) {
- scanCursor.next();
- ITupleReference frameTuple = scanCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- scanCursor.close();
- }
+ try {
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
- // try a range search
- print("RANGE SEARCH:\n");
- ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
+ // try a range search
+ print("RANGE SEARCH:\n");
+ ITreeIndexCursor rangeCursor = new BTreeRangeSearchCursor(leafFrame);
- // build low and high keys
- ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
- DataOutput kdos = ktb.getDataOutput();
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
- ISerializerDeserializer[] keyDescSers = {
- IntegerSerializerDeserializer.INSTANCE,
- IntegerSerializerDeserializer.INSTANCE };
- RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
- IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx
- .getFrameSize(), keyDesc);
- keyAccessor.reset(frame);
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ keyAccessor.reset(frame);
- appender.reset(frame, true);
+ appender.reset(frame, true);
- // build and append low key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
- ktb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
+ ktb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- // build and append high key
- ktb.reset();
- IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
- ktb.addFieldEndOffset();
- IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
- ktb.addFieldEndOffset();
- appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb
- .getSize());
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
+ ktb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
- // create tuplereferences for search keys
- FrameTupleReference lowKey = new FrameTupleReference();
- lowKey.reset(keyAccessor, 0);
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
- FrameTupleReference highKey = new FrameTupleReference();
- highKey.reset(keyAccessor, 1);
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
- IBinaryComparator[] searchCmps = new IBinaryComparator[2];
- searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- searchCmps[1] = IntegerBinaryComparatorFactory.INSTANCE
- .createBinaryComparator();
- MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+ IBinaryComparator[] searchCmps = new IBinaryComparator[2];
+ searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ searchCmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
- // print("INDEX RANGE SEARCH ON: " + cmp.printKey(lowKey, 0) + " " +
- // cmp.printKey(highKey, 0) + "\n");
+ // print("INDEX RANGE SEARCH ON: " + cmp.printKey(lowKey, 0) + " " +
+ // cmp.printKey(highKey, 0) + "\n");
- RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
- true, true, searchCmp, searchCmp);
- btree.search(rangeCursor, rangePred, searchOpCtx);
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ btree.search(rangeCursor, rangePred, searchOpCtx);
- try {
- while (rangeCursor.hasNext()) {
- rangeCursor.next();
- ITupleReference frameTuple = rangeCursor.getTuple();
- String rec = cmp.printTuple(frameTuple, recDescSers);
- print(rec + "\n");
- }
- } catch (Exception e) {
- e.printStackTrace();
- } finally {
- rangeCursor.close();
- }
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
- btree.close();
- bufferCache.closeFile(fileId);
- bufferCache.close();
+ btree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
- print("\n");
- }
+ print("\n");
+ }
- public static String randomString(int length, Random random) {
- String s = Long.toHexString(Double
- .doubleToLongBits(random.nextDouble()));
- StringBuilder strBuilder = new StringBuilder();
- for (int i = 0; i < s.length() && i < length; i++) {
- strBuilder
- .append(s.charAt(Math.abs(random.nextInt()) % s.length()));
- }
- return strBuilder.toString();
- }
+ public static String randomString(int length, Random random) {
+ String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
+ StringBuilder strBuilder = new StringBuilder();
+ for (int i = 0; i < s.length() && i < length; i++) {
+ strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
+ }
+ return strBuilder.toString();
+ }
}
\ No newline at end of file
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
index 54c6cb3..662d803 100644
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/AbstractRTreeTest.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree;
import java.io.File;
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
index 8cffd4d..b0456ce 100644
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/RTreeTest.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree;
import java.io.DataOutput;
@@ -30,15 +45,15 @@
import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.impls.TreeDiskOrderScanCursor;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStats;
import edu.uci.ics.hyracks.storage.am.common.utility.TreeIndexStatsGatherer;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.NSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeDiskOrderScanCursor;
import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
@@ -88,8 +103,9 @@
RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
- ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory, keyFieldCount);
- ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+ ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
+ keyFieldCount);
+ ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
@@ -155,8 +171,10 @@
tuple.reset(accessor, 0);
- print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
- + " " + Math.max(p1y, p2y) + "\n");
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
+ + " " + Math.max(p1y, p2y) + "\n");
+ }
try {
rtree.insert(tuple, insertOpCtx);
@@ -174,7 +192,346 @@
// disk-order scan
print("DISK-ORDER SCAN:\n");
- RTreeDiskOrderScanCursor diskOrderCursor = new RTreeDiskOrderScanCursor(leafFrame);
+ TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
+ RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(IndexOp.DISKORDERSCAN, leafFrame, null, null);
+ rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
+ try {
+ while (diskOrderCursor.hasNext()) {
+ diskOrderCursor.next();
+ ITupleReference frameTuple = diskOrderCursor.getTuple();
+ String rec = cmp.printTuple(frameTuple, recDescSers);
+ print(rec + "\n");
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ diskOrderCursor.close();
+ }
+
+ TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
+ rtree.getRootPageId());
+ TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
+ String string = stats.toString();
+ System.out.println(string);
+
+ rtree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+
+ }
+
+ @Test
+ public void test02() throws Exception {
+
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // declare keys
+ int keyFieldCount = 4;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = cmps[0];
+ cmps[2] = cmps[0];
+ cmps[3] = cmps[0];
+
+ // declare tuple fields
+ int fieldCount = 7;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(8);
+ typeTraits[1] = new TypeTrait(8);
+ typeTraits[2] = new TypeTrait(8);
+ typeTraits[3] = new TypeTrait(8);
+ typeTraits[4] = new TypeTrait(8);
+ typeTraits[5] = new TypeTrait(4);
+ typeTraits[6] = new TypeTrait(8);
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
+
+ ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
+ keyFieldCount);
+ ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+ IRTreeFrame interiorFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
+ IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+
+ RTree rtree = new RTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ rtree.create(fileId, leafFrame, metaFrame);
+ rtree.open(fileId);
+
+ ByteBuffer hyracksFrame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ @SuppressWarnings("rawtypes")
+ ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(hyracksFrame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ for (int i = 0; i < 10000; i++) {
+
+ double p1x = rnd.nextDouble();
+ double p1y = rnd.nextDouble();
+ double p2x = rnd.nextDouble();
+ double p2y = rnd.nextDouble();
+
+ double pk1 = rnd.nextDouble();
+ int pk2 = rnd.nextInt();
+ double pk3 = rnd.nextDouble();
+
+ tb.reset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(hyracksFrame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
+ + " " + Math.max(p1y, p2y) + "\n");
+ }
+
+ try {
+ rtree.insert(tuple, insertOpCtx);
+ } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // rtree.printTree(leafFrame, interiorFrame, recDescSers);
+ // System.out.println();
+
+ String rtreeStats = rtree.printStats();
+ print(rtreeStats);
+
+ RTreeOpContext deleteOpCtx = rtree.createOpContext(IndexOp.DELETE, leafFrame, interiorFrame, metaFrame);
+ rnd.setSeed(50);
+ for (int i = 0; i < 10000; i++) {
+
+ double p1x = rnd.nextDouble();
+ double p1y = rnd.nextDouble();
+ double p2x = rnd.nextDouble();
+ double p2y = rnd.nextDouble();
+
+ double pk1 = rnd.nextDouble();
+ int pk2 = rnd.nextInt();
+ double pk3 = rnd.nextDouble();
+
+ tb.reset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(hyracksFrame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ print("DELETING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
+ + " " + Math.max(p1y, p2y) + "\n");
+ }
+
+ try {
+ rtree.delete(tuple, deleteOpCtx);
+
+ } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ TreeIndexStatsGatherer statsGatherer = new TreeIndexStatsGatherer(bufferCache, freePageManager, fileId,
+ rtree.getRootPageId());
+ TreeIndexStats stats = statsGatherer.gatherStats(leafFrame, interiorFrame, metaFrame);
+ String string = stats.toString();
+ System.out.println(string);
+
+ rtree.close();
+ bufferCache.closeFile(fileId);
+ bufferCache.close();
+
+ }
+
+ @Test
+ public void test03() throws Exception {
+
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // declare keys
+ int keyFieldCount = 6;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = DoubleBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ cmps[1] = cmps[0];
+ cmps[2] = cmps[0];
+ cmps[3] = cmps[0];
+ cmps[4] = cmps[0];
+ cmps[5] = cmps[0];
+
+ // declare tuple fields
+ int fieldCount = 9;
+ ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ typeTraits[0] = new TypeTrait(8);
+ typeTraits[1] = new TypeTrait(8);
+ typeTraits[2] = new TypeTrait(8);
+ typeTraits[3] = new TypeTrait(8);
+ typeTraits[4] = new TypeTrait(8);
+ typeTraits[5] = new TypeTrait(8);
+ typeTraits[6] = new TypeTrait(8);
+ typeTraits[7] = new TypeTrait(4);
+ typeTraits[8] = new TypeTrait(8);
+
+ MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+ RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
+
+ ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
+ keyFieldCount);
+ ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+ IRTreeFrame interiorFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
+ IRTreeFrame leafFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+
+ RTree rtree = new RTree(bufferCache, freePageManager, interiorFrameFactory, leafFrameFactory, cmp);
+ rtree.create(fileId, leafFrame, metaFrame);
+ rtree.open(fileId);
+
+ ByteBuffer hyracksFrame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+ DataOutput dos = tb.getDataOutput();
+
+ @SuppressWarnings("rawtypes")
+ ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(hyracksFrame);
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ RTreeOpContext insertOpCtx = rtree.createOpContext(IndexOp.INSERT, leafFrame, interiorFrame, metaFrame);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ for (int i = 0; i < 10000; i++) {
+
+ double p1x = rnd.nextDouble();
+ double p1y = rnd.nextDouble();
+ double p1z = rnd.nextDouble();
+ double p2x = rnd.nextDouble();
+ double p2y = rnd.nextDouble();
+ double p2z = rnd.nextDouble();
+
+ double pk1 = rnd.nextDouble();
+ int pk2 = rnd.nextInt();
+ double pk3 = rnd.nextDouble();
+
+ tb.reset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1z, p2z), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1z, p2z), dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+ tb.addFieldEndOffset();
+ DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(hyracksFrame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.min(p1z, p2z)
+ + " " + " " + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + " " + Math.max(p1z, p2z) + "\n");
+ }
+
+ try {
+ rtree.insert(tuple, insertOpCtx);
+ } catch (TreeIndexException e) {
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // rtree.printTree(leafFrame, interiorFrame, recDescSers);
+ // System.out.println();
+
+ String rtreeStats = rtree.printStats();
+ print(rtreeStats);
+
+ // disk-order scan
+ print("DISK-ORDER SCAN:\n");
+ TreeDiskOrderScanCursor diskOrderCursor = new TreeDiskOrderScanCursor(leafFrame);
RTreeOpContext diskOrderScanOpCtx = rtree.createOpContext(IndexOp.DISKORDERSCAN, leafFrame, null, null);
rtree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame, diskOrderScanOpCtx);
try {
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
index f8d0383..9a4f496 100644
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/SearchCursorTest.java
@@ -1,3 +1,18 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
package edu.uci.ics.hyracks.storage.am.rtree;
import java.io.ByteArrayInputStream;
@@ -39,8 +54,8 @@
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.NSMInteriorFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.frames.NSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeOpContext;
import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSearchCursor;
@@ -90,8 +105,9 @@
RTreeTypeAwareTupleWriterFactory tupleWriterFactory = new RTreeTypeAwareTupleWriterFactory(typeTraits);
- ITreeIndexFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory, keyFieldCount);
- ITreeIndexFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
+ ITreeIndexFrameFactory interiorFrameFactory = new RTreeNSMInteriorFrameFactory(tupleWriterFactory,
+ keyFieldCount);
+ ITreeIndexFrameFactory leafFrameFactory = new RTreeNSMLeafFrameFactory(tupleWriterFactory, keyFieldCount);
ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
@@ -147,8 +163,10 @@
tuple.reset(accessor, 0);
- print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
- + " " + Math.max(p1y, p2y) + "\n");
+ if (i % 1000 == 0) {
+ print("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " " + Math.max(p1x, p2x)
+ + " " + Math.max(p1y, p2y) + "\n");
+ }
try {
rtree.insert(tuple, insertOpCtx);