- Fixed various bugs in the RTree concurrency control protocol which caused some searchers to miss some nodes due to concurrent splits.
- Fixed a bug in the unordered slot manager, and also reduced number of copy operations in the process of splitting a node.
- Fixed a bug in the LSMRTree search cursor that caused deadlocks due to incorrect BTree cursor resets.
- Fixed a minor bug in the LSMRTree merge.
- Applied the new file naming scheme for LSMRTree based on timestamp intervals.
- Added RTree and LSMRTree multi-threading test framework.
- Some code refactoring in the RTree frames.
- Code cleaning.
git-svn-id: https://hyracks.googlecode.com/svn/branches/hyracks_lsm_tree@1171 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
index d888aa0..12bc997 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/IntArrayList.java
@@ -16,74 +16,83 @@
package edu.uci.ics.hyracks.storage.am.common.ophelpers;
public class IntArrayList {
- private int[] data;
- private int size;
- private int first;
- private final int growth;
+ private int[] data;
+ private int size;
+ private int first;
+ private final int growth;
- public IntArrayList(int initialCapacity, int growth) {
- data = new int[initialCapacity];
- size = 0;
- first = 0;
- this.growth = growth;
- }
+ public IntArrayList(int initialCapacity, int growth) {
+ data = new int[initialCapacity];
+ size = 0;
+ first = 0;
+ this.growth = growth;
+ }
- public int size() {
- return size;
- }
+ public int size() {
+ return size;
+ }
- public int first() {
- return first;
- }
+ public int first() {
+ return first;
+ }
- public void add(int i) {
- if (size == data.length) {
- int[] newData = new int[data.length + growth];
- System.arraycopy(data, 0, newData, 0, data.length);
- data = newData;
- }
+ public void add(int i) {
+ if (size == data.length) {
+ int[] newData = new int[data.length + growth];
+ System.arraycopy(data, 0, newData, 0, data.length);
+ data = newData;
+ }
- data[size++] = i;
- }
+ data[size++] = i;
+ }
- public void removeLast() {
- if (size > 0)
- size--;
- }
+ public void addFirst(int i) {
+ int[] newData = new int[data.length + 1];
+ System.arraycopy(data, 0, newData, 0, first);
+ System.arraycopy(data, first, newData, first + 1, size - first);
+ data = newData;
+ data[first] = i;
+ size++;
+ }
- // WARNING: caller is responsible for checking size > 0
- public int getLast() {
- return data[size - 1];
- }
+ public void removeLast() {
+ if (size > 0)
+ size--;
+ }
- public int get(int i) {
- return data[i];
- }
+ // WARNING: caller is responsible for checking size > 0
+ public int getLast() {
+ return data[size - 1];
+ }
- // WARNING: caller is responsible for checking i < size
- public void set(int i, int value) {
- data[i] = value;
+ public int get(int i) {
+ return data[i];
+ }
- }
+ // WARNING: caller is responsible for checking i < size
+ public void set(int i, int value) {
+ data[i] = value;
- public int getFirst() {
- return data[first];
- }
+ }
- public void moveFirst() {
- first++;
- }
+ public int getFirst() {
+ return data[first];
+ }
- public void clear() {
- size = 0;
- first = 0;
- }
+ public void moveFirst() {
+ first++;
+ }
- public boolean isLast() {
- return size == first;
- }
+ public void clear() {
+ size = 0;
+ first = 0;
+ }
- public boolean isEmpty() {
- return size == 0;
- }
+ public boolean isLast() {
+ return size == first;
+ }
+
+ public boolean isEmpty() {
+ return size == 0;
+ }
}
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/LongArrayList.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/LongArrayList.java
index 4dd1b5f..cb4c8fe 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/LongArrayList.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/ophelpers/LongArrayList.java
@@ -35,6 +35,16 @@
public int first() {
return first;
}
+
+ public void addFirst(long i) {
+ long[] newData = new long[data.length + 1];
+ System.arraycopy(data, 0, newData, 0, first);
+ System.arraycopy(data, first, newData, first + 1, size - first);
+ data = newData;
+ data[first] = i;
+ size++;
+ }
+
public void add(long i) {
if (size == data.length) {
diff --git a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/test/TestOperationSelector.java b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/test/TestOperationSelector.java
index efc1ec4..bed8da6 100644
--- a/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/test/TestOperationSelector.java
+++ b/hyracks-storage-am-common/src/main/java/edu/uci/ics/hyracks/storage/am/common/test/TestOperationSelector.java
@@ -25,7 +25,7 @@
UPDATE,
POINT_SEARCH,
RANGE_SEARCH,
- ORDERED_SCAN,
+ SCAN,
DISKORDER_SCAN,
MERGE
}
diff --git a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTree.java b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
index ba32461..ef481ba 100644
--- a/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
+++ b/hyracks-storage-am-lsm-btree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/btree/impls/LSMBTree.java
@@ -113,8 +113,9 @@
@Override
public void open(int indexFileId) throws HyracksDataException {
memBTree.open(indexFileId);
- List<String> validFileNames = fileManager.cleanupAndGetValidFiles();
- for (String fileName : validFileNames) {
+ List<Object> validFileNames = fileManager.cleanupAndGetValidFiles();
+ for (Object o : validFileNames) {
+ String fileName = (String) o;
BTree btree = createDiskBTree(diskBTreeFactory, fileName, false);
diskBTrees.add(btree);
}
@@ -255,7 +256,7 @@
}
diskBTree.endBulkLoad(bulkLoadCtx);
- String finalFileName = fileManager.getFlushFileName();
+ String finalFileName = (String) fileManager.getFlushFileName();
rename(diskBTree, finalFileName);
return diskBTree;
}
@@ -276,7 +277,8 @@
BTree lastBTree = (BTree) mergingDiskBTrees.get(mergingDiskBTrees.size() - 1);
FileReference firstFile = diskFileMapProvider.lookupFileName(firstBTree.getFileId());
FileReference lastFile = diskFileMapProvider.lookupFileName(lastBTree.getFileId());
- String fileName = fileManager.getMergeFileName(firstFile.getFile().getName(), lastFile.getFile().getName());
+ String fileName = (String) fileManager.getMergeFileName(firstFile.getFile().getName(), lastFile.getFile()
+ .getName());
return fileName;
}
@@ -459,7 +461,7 @@
public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
LSMTreeBulkLoadContext bulkLoadCtx = (LSMTreeBulkLoadContext) ictx;
bulkLoadCtx.getBTree().endBulkLoad(bulkLoadCtx.getBulkLoadCtx());
- String finalFileName = fileManager.getFlushFileName();
+ String finalFileName = (String) fileManager.getFlushFileName();
rename(bulkLoadCtx.getBTree(), finalFileName);
lsmHarness.addBulkLoadedComponent(bulkLoadCtx.getBTree());
}
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMFileManager.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMFileManager.java
index 982749d..af1b942 100644
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMFileManager.java
+++ b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/api/ILSMFileManager.java
@@ -35,9 +35,9 @@
public interface ILSMFileManager {
public void createDirs();
- public String getFlushFileName();
+ public Object getFlushFileName();
- public String getMergeFileName(String firstFileName, String lastFileName) throws HyracksDataException;
+ public Object getMergeFileName(String firstFileName, String lastFileName) throws HyracksDataException;
public String getBaseDir();
@@ -48,7 +48,7 @@
// Deletes invalid files, and returns list of valid files from baseDir.
// The returned valid files are correctly sorted (based on the recency of data).
- public List<String> cleanupAndGetValidFiles() throws HyracksDataException;
+ public List<Object> cleanupAndGetValidFiles() throws HyracksDataException;
public Comparator<String> getFileNameComparator();
diff --git a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMTreeFileManager.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMTreeFileManager.java
index ab88b45..c30a999 100644
--- a/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMTreeFileManager.java
+++ b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMTreeFileManager.java
@@ -18,8 +18,6 @@
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
-import java.nio.file.Files;
-import java.nio.file.StandardCopyOption;
import java.text.Format;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
@@ -36,16 +34,16 @@
public class LSMTreeFileManager implements ILSMFileManager {
- private static final String SPLIT_STRING = "_";
- private static final String TEMP_FILE_PREFIX = "lsm_tree";
+ protected static final String SPLIT_STRING = "_";
+ protected static final String TEMP_FILE_PREFIX = "lsm_tree";
// Currently uses all IODevices registered in ioManager in a round-robin fashion.
- private final IOManager ioManager;
+ protected final IOManager ioManager;
// baseDir should reflect dataset name, and partition name.
- private final String baseDir;
- private final Format formatter = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss-SSS");
- private final Comparator<String> cmp = new FileNameComparator();
- private final Comparator<ComparableFileName> recencyCmp = new RecencyComparator();
+ protected final String baseDir;
+ protected final Format formatter = new SimpleDateFormat("yyyy-MM-dd-HH-mm-ss-SSS");
+ protected final Comparator<String> cmp = new FileNameComparator();
+ protected final Comparator<ComparableFileName> recencyCmp = new RecencyComparator();
public LSMTreeFileManager(IOManager ioManager, String baseDir) {
if (!baseDir.endsWith(System.getProperty("file.separator"))) {
@@ -74,16 +72,17 @@
@Override
public FileReference rename(FileReference src, String dest) throws HyracksDataException {
FileReference destFile = new FileReference(src.getDevideHandle(), dest);
- try {
- Files.move(src.getFile().toPath(), destFile.getFile().toPath(), StandardCopyOption.ATOMIC_MOVE);
- } catch (IOException e) {
- throw new HyracksDataException(e);
- }
+ //try {
+ //Files.move(src.getFile().toPath(), destFile.getFile().toPath(), StandardCopyOption.ATOMIC_MOVE);
+ src.getFile().renameTo(destFile.getFile());
+ //} catch (IOException e) {
+ // throw new HyracksDataException(e);
+ //}
return destFile;
}
@Override
- public String getFlushFileName() {
+ public Object getFlushFileName() {
Date date = new Date();
String ts = formatter.format(date);
// Begin timestamp and end timestamp are identical.
@@ -91,7 +90,7 @@
}
@Override
- public String getMergeFileName(String firstFileName, String lastFileName) throws HyracksDataException {
+ public Object getMergeFileName(String firstFileName, String lastFileName) throws HyracksDataException {
String[] firstTimestampRange = firstFileName.split(SPLIT_STRING);
String[] lastTimestampRange = lastFileName.split(SPLIT_STRING);
// Enclosing timestamp range.
@@ -127,8 +126,8 @@
}
@Override
- public List<String> cleanupAndGetValidFiles() throws HyracksDataException {
- List<String> validFiles = new ArrayList<String>();
+ public List<Object> cleanupAndGetValidFiles() throws HyracksDataException {
+ List<Object> validFiles = new ArrayList<Object>();
ArrayList<ComparableFileName> allFiles = new ArrayList<ComparableFileName>();
// Gather files from all IODeviceHandles.
for(IODeviceHandle dev : ioManager.getIODevices()) {
@@ -182,7 +181,7 @@
return validFiles;
}
- private class ComparableFileName implements Comparable<ComparableFileName> {
+ protected class ComparableFileName implements Comparable<ComparableFileName> {
public final String fullPath;
// Timestamp interval.
public final String[] interval;
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
index dc37aeb..c897943 100644
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
+++ b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
@@ -16,9 +16,6 @@
package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
import java.io.File;
-import java.io.FilenameFilter;
-import java.util.Arrays;
-import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import java.util.ListIterator;
@@ -43,6 +40,7 @@
import edu.uci.ics.hyracks.storage.am.common.api.IndexType;
import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMFileManager;
import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMTree;
import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryFreePageManager;
@@ -50,6 +48,7 @@
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMHarness;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMTreeIndexAccessor;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.TreeFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeFileManager.LSMRTreeFileNameComponent;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
@@ -86,7 +85,7 @@
private final static int MEM_BTREE_FILE_ID = 1;
// On-disk components.
- private final ILSMFileManager fileNameManager;
+ private final ILSMFileManager fileManager;
protected final IBufferCache diskBufferCache;
protected final IFileMapProvider diskFileMapProvider;
// For creating RTree's used in flush and merge.
@@ -109,7 +108,7 @@
public LSMRTree(IBufferCache memBufferCache, InMemoryFreePageManager memFreePageManager,
ITreeIndexFrameFactory rtreeInteriorFrameFactory, ITreeIndexFrameFactory rtreeLeafFrameFactory,
ITreeIndexFrameFactory btreeInteriorFrameFactory, ITreeIndexFrameFactory btreeLeafFrameFactory,
- ILSMFileManager fileNameManager, RTreeFactory diskRTreeFactory, BTreeFactory diskBTreeFactory,
+ ILSMFileManager fileManager, RTreeFactory diskRTreeFactory, BTreeFactory diskBTreeFactory,
IFileMapProvider diskFileMapProvider, int fieldCount, IBinaryComparatorFactory[] rtreeCmpFactories,
IBinaryComparatorFactory[] btreeCmpFactories) {
RTree memRTree = new RTree(memBufferCache, fieldCount, rtreeCmpFactories, memFreePageManager,
@@ -121,7 +120,7 @@
this.diskBufferCache = diskBTreeFactory.getBufferCache();
this.diskFileMapProvider = diskFileMapProvider;
this.diskBTreeFactory = diskBTreeFactory;
- this.fileNameManager = fileNameManager;
+ this.fileManager = fileManager;
this.rtreeInteriorFrameFactory = rtreeInteriorFrameFactory;
this.rtreeLeafFrameFactory = rtreeLeafFrameFactory;
this.btreeInteriorFrameFactory = btreeInteriorFrameFactory;
@@ -136,19 +135,12 @@
public void create(int indexFileId) throws HyracksDataException {
memComponent.getRTree().create(MEM_RTREE_FILE_ID);
memComponent.getBTree().create(MEM_BTREE_FILE_ID);
+ fileManager.createDirs();
}
/**
- * Opens LSMRTree, assuming a consistent state of the disk-resident
- * components. In particular, registers all files in in base dir of
- * fileNameManager as on-disk RTrees and BTrees.
- *
- * Example pathological scenario to explain "consistent state assumption":
- * Suppose a merge finished, but before the original files were deleted the
- * system crashes. We are left in a state where we have the original RTrees
- * and BTrees in addition to the merged ones. We assume that prior to
- * calling this method a separate recovery process has ensured the
- * consistent of the disk-resident components.
+ * Opens LSMRTree, cleaning up invalid files from base dir, and registering
+ * all valid files as on-disk RTrees and BTrees.
*
* @param indexFileId
* Dummy file id.
@@ -156,38 +148,17 @@
*/
@Override
public void open(int indexFileId) throws HyracksDataException {
- // TODO: Port to new naming scheme.
- memComponent.getRTree().open(MEM_RTREE_FILE_ID);
+ memComponent.getRTree().open(MEM_RTREE_FILE_ID);
memComponent.getBTree().open(MEM_BTREE_FILE_ID);
- File dir = new File(fileNameManager.getBaseDir());
- FilenameFilter rtreeFilter = new FilenameFilter() {
- public boolean accept(File dir, String name) {
- return !name.startsWith(".") && name.endsWith("rtree");
- }
- };
- String[] rtreeFiles = dir.list(rtreeFilter);
-
- FilenameFilter btreeFilter = new FilenameFilter() {
- public boolean accept(File dir, String name) {
- return !name.startsWith(".") && name.endsWith("btree");
- }
- };
- String[] btreeFiles = dir.list(btreeFilter);
-
- if (rtreeFiles == null || btreeFiles == null) {
- return;
- }
-
- Comparator<String> fileNameCmp = fileNameManager.getFileNameComparator();
- Arrays.sort(rtreeFiles, fileNameCmp);
- Arrays.sort(btreeFiles, fileNameCmp);
- // Assert rtreeFiles.size() == btreeFiles.size()
- for (int i = 0; i < rtreeFiles.length; i++) {
- RTree rtree = (RTree) createDiskTree(diskRTreeFactory, rtreeFiles[i], false);
- BTree btree = (BTree) createDiskTree(diskBTreeFactory, btreeFiles[i], false);
+ List<Object> validFileNames = fileManager.cleanupAndGetValidFiles();
+ for (Object o : validFileNames) {
+ LSMRTreeFileNameComponent component = (LSMRTreeFileNameComponent) o;
+ RTree rtree = (RTree) createDiskTree(diskRTreeFactory, component.getRTreeFileName(), false);
+ BTree btree = (BTree) createDiskTree(diskBTreeFactory, component.getBTreeFileName(), false);
LSMRTreeComponent diskComponent = new LSMRTreeComponent(rtree, btree);
diskComponents.add(diskComponent);
}
+
}
@Override
@@ -206,7 +177,44 @@
memComponent.getBTree().close();
}
- // TODO: Candidate for more code sharing.
+ private LSMRTreeFileNameComponent getMergeTargetFileName(List<Object> mergingDiskTrees) throws HyracksDataException {
+ RTree firstTree = ((LSMRTreeComponent) mergingDiskTrees.get(0)).getRTree();
+ RTree lastTree = ((LSMRTreeComponent) mergingDiskTrees.get(mergingDiskTrees.size() - 1)).getRTree();
+ FileReference firstFile = diskFileMapProvider.lookupFileName(firstTree.getFileId());
+ FileReference lastFile = diskFileMapProvider.lookupFileName(lastTree.getFileId());
+ LSMRTreeFileNameComponent component = (LSMRTreeFileNameComponent) ((LSMRTreeFileManager) fileManager)
+ .getMergeFileName(firstFile.getFile().getName(), lastFile.getFile().getName());
+ return component;
+ }
+
+ private void rename(ITreeIndex srcTmpTree, String dest) throws HyracksDataException {
+ int tmpFileId = srcTmpTree.getFileId();
+ FileReference srcFileRef = diskFileMapProvider.lookupFileName(tmpFileId);
+ diskBufferCache.closeFile(tmpFileId);
+ diskBufferCache.deleteFile(tmpFileId, true);
+ FileReference destFileRef = fileManager.rename(srcFileRef, dest);
+ // File will be deleted during cleanup of merge().
+ diskBufferCache.createFile(destFileRef);
+ int newFileId = diskFileMapProvider.lookupFileId(destFileRef);
+ diskBufferCache.openFile(newFileId);
+ srcTmpTree.open(newFileId);
+ }
+
+ private ITreeIndex createTempTree(TreeFactory diskTreeFactory) throws HyracksDataException {
+ FileReference file = fileManager.createTempFile();
+ // File will be deleted during rename().
+ diskBufferCache.createFile(file);
+ int diskTreeFileId = diskFileMapProvider.lookupFileId(file);
+ // File will be closed during rename().
+ diskBufferCache.openFile(diskTreeFileId);
+ // Create new tree instance.
+ ITreeIndex diskTree = diskTreeFactory.createIndexInstance(diskTreeFileId);
+ diskTree.create(diskTreeFileId);
+ // Tree will be closed during cleanup of merge().
+ diskTree.open(diskTreeFileId);
+ return diskTree;
+ }
+
protected ITreeIndex createDiskTree(TreeFactory diskTreeFactory, String fileName, boolean createTree)
throws HyracksDataException {
// Register the new tree file.
@@ -228,14 +236,10 @@
@Override
public IIndexBulkLoadContext beginBulkLoad(float fillFactor) throws TreeIndexException, HyracksDataException {
- // Note that by using a flush target file name, we state that the new
- // bulk loaded tree is "newer" than any other merged tree.
-
- String fileName = fileNameManager.getFlushFileName();
- RTree diskRTree = (RTree) createDiskTree(diskRTreeFactory, fileName + "-rtree", true);
+ RTree diskRTree = (RTree) createTempTree(diskRTreeFactory);
// For each RTree, we require to have a buddy BTree. thus, we create an
- // empty BTree. This can be optimized later.
- BTree diskBTree = (BTree) createDiskTree(diskBTreeFactory, fileName + "-btree", true);
+ // empty BTree.
+ BTree diskBTree = (BTree) createTempTree(diskBTreeFactory);
LSMRTreeBulkLoadContext bulkLoadCtx = new LSMRTreeBulkLoadContext(diskRTree, diskBTree);
bulkLoadCtx.beginBulkLoad(fillFactor);
return bulkLoadCtx;
@@ -252,43 +256,47 @@
public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
LSMRTreeBulkLoadContext bulkLoadCtx = (LSMRTreeBulkLoadContext) ictx;
bulkLoadCtx.getRTree().endBulkLoad(bulkLoadCtx.getBulkLoadCtx());
+ LSMRTreeFileNameComponent component = (LSMRTreeFileNameComponent) ((LSMRTreeFileManager) fileManager)
+ .getFlushFileName();
+ rename(bulkLoadCtx.getRTree(), component.getRTreeFileName());
+ rename(bulkLoadCtx.getBTree(), component.getBTreeFileName());
LSMRTreeComponent diskComponent = new LSMRTreeComponent(bulkLoadCtx.getRTree(), bulkLoadCtx.getBTree());
lsmHarness.addBulkLoadedComponent(diskComponent);
}
@Override
public ITreeIndexFrameFactory getLeafFrameFactory() {
- return null;
+ return memComponent.getRTree().getLeafFrameFactory();
}
@Override
public ITreeIndexFrameFactory getInteriorFrameFactory() {
- return null;
+ return memComponent.getRTree().getInteriorFrameFactory();
}
@Override
public IFreePageManager getFreePageManager() {
- return null;
+ return memComponent.getRTree().getFreePageManager();
}
@Override
public int getFieldCount() {
- return 0;
+ return memComponent.getRTree().getFieldCount();
}
@Override
public int getRootPageId() {
- return 0;
+ return memComponent.getRTree().getRootPageId();
}
@Override
public IndexType getIndexType() {
- return null;
+ return memComponent.getRTree().getIndexType();
}
@Override
public int getFileId() {
- return 0;
+ return memComponent.getRTree().getFileId();
}
public boolean insertUpdateOrDelete(ITupleReference tuple, IIndexOpContext ictx) throws HyracksDataException,
@@ -332,7 +340,8 @@
}
public void search(ITreeIndexCursor cursor, List<Object> diskComponents, ISearchPredicate pred,
- IIndexOpContext ictx, boolean includeMemComponent, AtomicInteger searcherRefCount) throws HyracksDataException, TreeIndexException {
+ IIndexOpContext ictx, boolean includeMemComponent, AtomicInteger searcherRefCount)
+ throws HyracksDataException, TreeIndexException {
LSMRTreeOpContext ctx = (LSMRTreeOpContext) ictx;
int numDiskTrees = diskComponents.size();
int numTrees = (includeMemComponent) ? numDiskTrees + 1 : numDiskTrees;
@@ -382,14 +391,16 @@
@Override
public Object flush() throws HyracksDataException, TreeIndexException {
+ // Renaming order is critical because we assume ordering when we
+ // read the file names when we open the tree.
+ // The RTree should be renamed before the BTree.
+
// scan the memory RTree
ITreeIndexAccessor memRTreeAccessor = memComponent.getRTree().createAccessor();
ITreeIndexCursor rtreeScanCursor = memRTreeAccessor.createSearchCursor();
SearchPredicate rtreeNullPredicate = new SearchPredicate(null, null);
memRTreeAccessor.search(rtreeScanCursor, rtreeNullPredicate);
-
- String fileName = fileNameManager.getFlushFileName();
- RTree diskRTree = (RTree) createDiskTree(diskRTreeFactory, fileName + "-rtree", true);
+ RTree diskRTree = (RTree) createTempTree(diskRTreeFactory);
// BulkLoad the tuples from the in-memory tree into the new disk RTree.
IIndexBulkLoadContext rtreeBulkLoadCtx = diskRTree.beginBulkLoad(1.0f);
@@ -410,8 +421,7 @@
ITreeIndexCursor btreeScanCursor = memBTreeAccessor.createSearchCursor();
RangePredicate btreeNullPredicate = new RangePredicate(null, null, true, true, null, null);
memBTreeAccessor.search(btreeScanCursor, btreeNullPredicate);
-
- BTree diskBTree = (BTree) createDiskTree(diskBTreeFactory, fileName + "-btree", true);
+ BTree diskBTree = (BTree) createTempTree(diskBTreeFactory);
// BulkLoad the tuples from the in-memory tree into the new disk BTree.
IIndexBulkLoadContext btreeBulkLoadCtx = diskBTree.beginBulkLoad(1.0f);
@@ -425,11 +435,21 @@
btreeScanCursor.close();
}
diskBTree.endBulkLoad(btreeBulkLoadCtx);
+
+ LSMRTreeFileNameComponent component = (LSMRTreeFileNameComponent) ((LSMRTreeFileManager) fileManager)
+ .getFlushFileName();
+ rename(diskRTree, component.getRTreeFileName());
+ rename(diskBTree, component.getBTreeFileName());
+
return new LSMRTreeComponent(diskRTree, diskBTree);
}
@Override
public Object merge(List<Object> mergedComponents) throws HyracksDataException, TreeIndexException {
+ // Renaming order is critical because we assume ordering when we
+ // read the file names when we open the tree.
+ // The RTree should be renamed before the BTree.
+
IIndexOpContext ctx = createOpContext();
ITreeIndexCursor cursor = new LSMRTreeSearchCursor();
ISearchPredicate rtreeSearchPred = new SearchPredicate(null, null);
@@ -437,12 +457,14 @@
List<Object> mergingComponents = lsmHarness.search(cursor, rtreeSearchPred, ctx, false);
mergedComponents.addAll(mergingComponents);
- // Bulk load the tuples from all on-disk RTrees into the new RTree.
- // TODO: Passing dummy values for now. Switch to naming scheme.
- String fileName = fileNameManager.getMergeFileName("dummy", "dummy");
- RTree mergedRTree = (RTree) createDiskTree(diskRTreeFactory, fileName + "-rtree", true);
- BTree mergedBTree = (BTree) createDiskTree(diskBTreeFactory, fileName + "-btree", true);
+ // Nothing to merge.
+ if (mergedComponents.isEmpty()) {
+ return null;
+ }
+ // Bulk load the tuples from all on-disk RTrees into the new RTree.
+ RTree mergedRTree = (RTree) createTempTree(diskRTreeFactory);
+ BTree mergedBTree = (BTree) createTempTree(diskBTreeFactory);
IIndexBulkLoadContext bulkLoadCtx = mergedRTree.beginBulkLoad(1.0f);
try {
while (cursor.hasNext()) {
@@ -454,6 +476,10 @@
cursor.close();
}
mergedRTree.endBulkLoad(bulkLoadCtx);
+
+ LSMRTreeFileNameComponent component = getMergeTargetFileName(mergingComponents);
+ rename(mergedRTree, component.getRTreeFileName());
+ rename(mergedBTree, component.getBTreeFileName());
return new LSMRTreeComponent(mergedRTree, mergedBTree);
}
@@ -516,7 +542,7 @@
return new LSMRTreeAccessor(lsmHarness, createOpContext());
}
- private class LSMRTreeAccessor extends LSMTreeIndexAccessor {
+ public class LSMRTreeAccessor extends LSMTreeIndexAccessor {
public LSMRTreeAccessor(LSMHarness lsmHarness, IIndexOpContext ctx) {
super(lsmHarness, ctx);
}
@@ -525,6 +551,11 @@
public ITreeIndexCursor createSearchCursor() {
return new LSMRTreeSearchCursor();
}
+
+ public MultiComparator getMultiComparator() {
+ LSMRTreeOpContext concreteCtx = (LSMRTreeOpContext) ctx;
+ return concreteCtx.rtreeOpContext.cmp;
+ }
}
public IBinaryComparatorFactory[] getComparatorFactories() {
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
new file mode 100644
index 0000000..07daa43
--- /dev/null
+++ b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileManager.java
@@ -0,0 +1,184 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.IODeviceHandle;
+import edu.uci.ics.hyracks.control.nc.io.IOManager;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMTreeFileManager;
+
+public class LSMRTreeFileManager extends LSMTreeFileManager {
+ private static final String RTREE_STRING = "r";
+ private static final String BTREE_STRING = "b";
+
+ public LSMRTreeFileManager(IOManager ioManager, String baseDir) {
+ super(ioManager, baseDir);
+ }
+
+ @Override
+ public Object getFlushFileName() {
+ String baseName = (String) super.getFlushFileName();
+ return new LSMRTreeFileNameComponent(baseName + SPLIT_STRING + RTREE_STRING, baseName + SPLIT_STRING
+ + BTREE_STRING);
+
+ }
+
+ @Override
+ public Object getMergeFileName(String firstFileName, String lastFileName) throws HyracksDataException {
+ String baseName = (String) super.getMergeFileName(firstFileName, lastFileName);
+ return new LSMRTreeFileNameComponent(baseName + SPLIT_STRING + RTREE_STRING, baseName + SPLIT_STRING
+ + BTREE_STRING);
+ }
+
+ private boolean searchForFileName(HashSet<String> stringSet, String file) {
+ if (stringSet.contains(file)) {
+ return true;
+ }
+ return false;
+
+ }
+
+ public List<Object> cleanupAndGetValidFiles() throws HyracksDataException {
+ List<Object> validFiles = new ArrayList<Object>();
+ ArrayList<ComparableFileName> allRTreeFiles = new ArrayList<ComparableFileName>();
+ ArrayList<ComparableFileName> allBTreeFiles = new ArrayList<ComparableFileName>();
+
+ // Gather files from all IODeviceHandles.
+ for (IODeviceHandle dev : ioManager.getIODevices()) {
+ File dir = new File(dev.getPath(), baseDir);
+ FilenameFilter btreeFilter = new FilenameFilter() {
+ public boolean accept(File dir, String name) {
+ return !name.startsWith(".") && name.endsWith(BTREE_STRING);
+ }
+ };
+ String[] btreeFiles = dir.list(btreeFilter);
+ for (String file : btreeFiles) {
+ allBTreeFiles.add(new ComparableFileName(dir.getPath() + File.separator + file));
+ }
+ HashSet<String> btreeFilesSet = new HashSet<String>();
+ for (String file : btreeFiles) {
+ int index = file.lastIndexOf(SPLIT_STRING);
+ btreeFilesSet.add(file.substring(0, index));
+ }
+
+ FilenameFilter rtreeFilter = new FilenameFilter() {
+ public boolean accept(File dir, String name) {
+ return !name.startsWith(".") && name.endsWith(RTREE_STRING);
+ }
+ };
+ String[] rtreeFiles = dir.list(rtreeFilter);
+ for (String file : rtreeFiles) {
+ int index = file.lastIndexOf(SPLIT_STRING);
+ file = file.substring(0, index);
+ if (searchForFileName(btreeFilesSet, file)) {
+ allRTreeFiles.add(new ComparableFileName(dir.getPath() + File.separator + file));
+ } else {
+ // Couldn't find the corresponding BTree file; thus, delete
+ // the RTree file.
+ File invalidRTreeFile = new File(dir.getPath() + File.separator + file);
+ invalidRTreeFile.delete();
+ }
+ }
+ }
+ // Trivial cases.
+ if (allRTreeFiles.isEmpty() || allBTreeFiles.isEmpty()) {
+ return validFiles;
+ }
+
+ if (allRTreeFiles.size() == 1 && allBTreeFiles.size() == 1) {
+ validFiles.add(new LSMRTreeFileNameComponent(allRTreeFiles.get(0).fullPath, allBTreeFiles.get(0).fullPath));
+ return validFiles;
+ }
+
+ // Sorts files names from earliest to latest timestamp.
+ Collections.sort(allRTreeFiles);
+ Collections.sort(allBTreeFiles);
+
+ List<ComparableFileName> validComparableRTreeFiles = new ArrayList<ComparableFileName>();
+ ComparableFileName lastRTree = allRTreeFiles.get(0);
+ validComparableRTreeFiles.add(lastRTree);
+
+ List<ComparableFileName> validComparableBTreeFiles = new ArrayList<ComparableFileName>();
+ ComparableFileName lastBTree = allBTreeFiles.get(0);
+ validComparableBTreeFiles.add(lastBTree);
+
+ for (int i = 1; i < allRTreeFiles.size(); i++) {
+ ComparableFileName currentRTree = allRTreeFiles.get(i);
+ ComparableFileName currentBTree = allBTreeFiles.get(i);
+ // Current start timestamp is greater than last stop timestamp.
+ if (currentRTree.interval[0].compareTo(lastRTree.interval[1]) > 0
+ && currentBTree.interval[0].compareTo(lastBTree.interval[1]) > 0) {
+ validComparableRTreeFiles.add(currentRTree);
+ validComparableBTreeFiles.add(currentBTree);
+ lastRTree = currentRTree;
+ lastBTree = currentBTree;
+ } else if (currentRTree.interval[0].compareTo(lastRTree.interval[0]) >= 0
+ && currentRTree.interval[1].compareTo(lastRTree.interval[1]) <= 0
+ && currentBTree.interval[0].compareTo(lastBTree.interval[0]) >= 0
+ && currentBTree.interval[1].compareTo(lastBTree.interval[1]) <= 0) {
+ // Invalid files are completely contained in last interval.
+ File invalidRTreeFile = new File(currentRTree.fullPath);
+ invalidRTreeFile.delete();
+ File invalidBTreeFile = new File(currentBTree.fullPath);
+ invalidBTreeFile.delete();
+ } else {
+ // This scenario should not be possible.
+ throw new HyracksDataException("Found LSM files with overlapping but not contained timestamp intervals.");
+ }
+ }
+
+ // Sort valid files in reverse lexicographical order, such that newer
+ // files come first.
+ Collections.sort(validComparableRTreeFiles, recencyCmp);
+ Collections.sort(validComparableBTreeFiles, recencyCmp);
+
+ Iterator<ComparableFileName> rtreeFileIter = validComparableRTreeFiles.iterator();
+ Iterator<ComparableFileName> btreeFileIter = validComparableBTreeFiles.iterator();
+ while (rtreeFileIter.hasNext() && btreeFileIter.hasNext()) {
+ ComparableFileName cmpRTreeFileName = rtreeFileIter.next();
+ ComparableFileName cmpBTreeFileName = btreeFileIter.next();
+ validFiles.add(new LSMRTreeFileNameComponent(cmpRTreeFileName.fullPath, cmpBTreeFileName.fullPath));
+ }
+
+ return validFiles;
+ }
+
+ public class LSMRTreeFileNameComponent {
+ private final String rtreeFileName;
+ private final String btreeFileName;
+
+ LSMRTreeFileNameComponent(String rtreeFileName, String btreeFileName) {
+ this.rtreeFileName = rtreeFileName;
+ this.btreeFileName = btreeFileName;
+ }
+
+ public String getRTreeFileName() {
+ return rtreeFileName;
+ }
+
+ public String getBTreeFileName() {
+ return btreeFileName;
+ }
+ }
+}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
index 49c6a59..a5e799f 100644
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
+++ b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeOpContext.java
@@ -30,8 +30,8 @@
public final class LSMRTreeOpContext implements IIndexOpContext {
- private RTreeOpContext rtreeOpContext;
- private BTreeOpContext btreeOpContext;
+ public RTreeOpContext rtreeOpContext;
+ public BTreeOpContext btreeOpContext;
public final RTree.RTreeAccessor memRTreeAccessor;
public final BTree.BTreeAccessor memBTreeAccessor;
private IndexOp op;
@@ -43,7 +43,6 @@
IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories) {
this.memRTreeAccessor = memRtreeAccessor;
this.memBTreeAccessor = memBtreeAccessor;
- // TODO: Alex. is there a reason we need to create new OpContexts?
this.rtreeOpContext = new RTreeOpContext(rtreeLeafFrame, rtreeInteriorFrame, rtreeMetaFrame, rtreeCmpFactories, rTreeHeightHint);
this.btreeOpContext = new BTreeOpContext(btreeLeafFrameFactory, btreeInteriorFrameFactory, btreeMetaFrame,
btreeCmpFactories);
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java
index b277694..fa72596 100644
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java
+++ b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeSearchCursor.java
@@ -70,33 +70,45 @@
return true;
}
while (currentCursror < numberOfTrees) {
- while (rtreeCursors[currentCursror].hasNext()) {
- rtreeCursors[currentCursror].next();
- ITupleReference currentTuple = rtreeCursors[currentCursror].getTuple();
+ try {
+ while (rtreeCursors[currentCursror].hasNext()) {
+ rtreeCursors[currentCursror].next();
+ ITupleReference currentTuple = rtreeCursors[currentCursror].getTuple();
- boolean killerTupleFound = false;
- for (int i = 0; i <= currentCursror; i++) {
- btreeRangePredicate.setHighKey(currentTuple, true);
- btreeRangePredicate.setLowKey(currentTuple, true);
+ boolean killerTupleFound = false;
+ for (int i = 0; i <= currentCursror; i++) {
- try {
- diskBTreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
- } catch (TreeIndexException e) {
- throw new HyracksDataException(e);
+ try {
+ btreeCursors[i].reset();
+ btreeRangePredicate.setHighKey(currentTuple, true);
+ btreeRangePredicate.setLowKey(currentTuple, true);
+ diskBTreeAccessors[i].search(btreeCursors[i], btreeRangePredicate);
+ } catch (TreeIndexException e) {
+ throw new HyracksDataException(e);
+ }
+ try {
+ if (btreeCursors[i].hasNext()) {
+ killerTupleFound = true;
+ break;
+ }
+ } finally {
+ btreeCursors[i].close();
+ }
+
}
-
- if (btreeCursors[i].hasNext()) {
- killerTupleFound = true;
- break;
+ if (!killerTupleFound) {
+ frameTuple = currentTuple;
+ foundNext = true;
+ return true;
}
}
- if (!killerTupleFound) {
- frameTuple = currentTuple;
- foundNext = true;
- return true;
+ } finally {
+ if (!foundNext) {
+ rtreeCursors[currentCursror].close();
}
}
currentCursror++;
+
}
return false;
}
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/Pair.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/Pair.java
deleted file mode 100644
index f651376..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/Pair.java
+++ /dev/null
@@ -1,35 +0,0 @@
-/*
- * Copyright 2009-2010 by The Regents of the University of California
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * you may obtain a copy of the License from
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-public class Pair<A, B> {
- private final A first;
- private final B second;
-
- public Pair(A first, B second) {
- super();
- this.first = first;
- this.second = second;
- }
-
- public A getFirst() {
- return first;
- }
-
- public B getSecond() {
- return second;
- }
-}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
index eed2d8b..6b96843 100644
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
+++ b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/utils/LSMRTreeUtils.java
@@ -29,8 +29,8 @@
import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryBufferCache;
import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryFreePageManager;
import edu.uci.ics.hyracks.storage.am.lsm.common.impls.BTreeFactory;
-import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMTreeFileManager;
import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTree;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeFileManager;
import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.RTreeFactory;
import edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples.LSMTypeAwareTupleWriterFactory;
import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
@@ -40,8 +40,8 @@
public class LSMRTreeUtils {
public static LSMRTree createLSMTree(InMemoryBufferCache memBufferCache,
- InMemoryFreePageManager memFreePageManager, IOManager ioManager, String onDiskDir, IBufferCache diskBufferCache,
- IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
+ InMemoryFreePageManager memFreePageManager, IOManager ioManager, String onDiskDir,
+ IBufferCache diskBufferCache, IFileMapProvider diskFileMapProvider, ITypeTraits[] typeTraits,
IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
IPrimitiveValueProviderFactory[] valueProviderFactories) {
LSMTypeAwareTupleWriterFactory rtreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
@@ -64,7 +64,7 @@
BTreeFactory diskBTreeFactory = new BTreeFactory(diskBufferCache, freePageManagerFactory, btreeCmpFactories,
typeTraits.length, btreeInteriorFrameFactory, btreeLeafFrameFactory);
- ILSMFileManager fileNameManager = new LSMTreeFileManager(ioManager, onDiskDir);
+ ILSMFileManager fileNameManager = new LSMRTreeFileManager(ioManager, onDiskDir);
LSMRTree lsmTree = new LSMRTree(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory,
rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
diskRTreeFactory, diskBTreeFactory, diskFileMapProvider, typeTraits.length, rtreeCmpFactories,
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
index dd57986..cad7981 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeFrame.java
@@ -15,25 +15,21 @@
package edu.uci.ics.hyracks.storage.am.rtree.api;
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
public interface IRTreeFrame extends ITreeIndexFrame {
- public void computeMBR(ISplitKey splitKey);
+ public void delete(int tupleIndex, MultiComparator cmp);
- public void delete(int tupleIndex, MultiComparator cmp);
+ public long getPageNsn();
- public long getPageNsn();
+ public void setPageNsn(long pageNsn);
- public void setPageNsn(long pageNsn);
+ public int getRightPage();
- public int getRightPage();
+ public void setRightPage(int rightPage);
- public void setRightPage(int rightPage);
-
- public void adjustMBR(ITreeIndexTupleReference[] tuples);
+ public void adjustMBR();
}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
index 242adad..5f333f3 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/api/IRTreeInteriorFrame.java
@@ -16,32 +16,29 @@
package edu.uci.ics.hyracks.storage.am.rtree.api;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.rtree.impls.PathList;
public interface IRTreeInteriorFrame extends IRTreeFrame {
- public boolean findBestChild(ITupleReference tuple, MultiComparator cmp);
+ public boolean findBestChild(ITupleReference tuple, MultiComparator cmp);
- public int getBestChildPageId();
+ public int getBestChildPageId();
- public int getChildPageId(int tupleIndex);
-
- public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex,
- MultiComparator cmp);
+ public int getChildPageId(int tupleIndex);
- public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp);
+ public int getChildPageIdIfIntersect(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
- public int findTupleByPointer(ITupleReference tuple, PathList traverseList,
- int parentId, MultiComparator cmp);
+ public int findTupleByPointer(ITupleReference tuple, MultiComparator cmp);
- public void adjustKey(ITupleReference tuple, int tupleIndex,
- MultiComparator cmp);
+ public int findTupleByPointer(ITupleReference tuple, PathList traverseList, int parentIndex, MultiComparator cmp);
- public boolean recomputeMBR(ITupleReference tuple, int tupleIndex,
- MultiComparator cmp);
+ public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws TreeIndexException;
- public void enlarge(ITupleReference tuple, MultiComparator cmp);
+ public boolean recomputeMBR(ITupleReference tuple, int tupleIndex, MultiComparator cmp);
+
+ public void enlarge(ITupleReference tuple, MultiComparator cmp);
boolean checkEnlargement(ITupleReference tuple, MultiComparator cmp);
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
index ea597ec..84e66ef 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMFrame.java
@@ -18,10 +18,13 @@
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.common.frames.TreeIndexNSMFrame;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.EntriesOrder;
import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
import edu.uci.ics.hyracks.storage.am.rtree.impls.Rectangle;
import edu.uci.ics.hyracks.storage.am.rtree.impls.TupleEntryArrayList;
@@ -117,6 +120,161 @@
return tuples;
}
+ @Override
+ public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException {
+ RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
+ RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
+ RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
+
+ // calculations are based on the R*-tree paper
+ int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
+ int splitDistribution = getTupleCount() - (2 * m) + 2;
+
+ // to calculate the minimum margin in order to pick the split axis
+ double minMargin = Double.MAX_VALUE;
+ int splitAxis = 0, sortOrder = 0;
+
+ int maxFieldPos = keyValueProviders.length / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ for (int k = 0; k < getTupleCount(); ++k) {
+
+ frameTuple.resetByTupleIndex(this, k);
+ double LowerKey = keyValueProviders[i]
+ .getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
+ double UpperKey = keyValueProviders[j]
+ .getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
+
+ tupleEntries1.add(k, LowerKey);
+ tupleEntries2.add(k, UpperKey);
+ }
+ double LowerKey = keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
+ double UpperKey = keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
+
+ tupleEntries1.add(-1, LowerKey);
+ tupleEntries2.add(-1, UpperKey);
+
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+ tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+
+ double lowerMargin = 0.0, upperMargin = 0.0;
+ // generate distribution
+ for (int k = 1; k <= splitDistribution; ++k) {
+ int d = m - 1 + k;
+
+ generateDist(tuple, tupleEntries1, rec[0], 0, d);
+ generateDist(tuple, tupleEntries2, rec[1], 0, d);
+ generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+ generateDist(tuple, tupleEntries2, rec[3], d, getTupleCount() + 1);
+
+ // calculate the margin of the distributions
+ lowerMargin += rec[0].margin() + rec[2].margin();
+ upperMargin += rec[1].margin() + rec[3].margin();
+ }
+ double margin = Math.min(lowerMargin, upperMargin);
+
+ // store minimum margin as split axis
+ if (margin < minMargin) {
+ minMargin = margin;
+ splitAxis = i;
+ sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
+ }
+
+ tupleEntries1.clear();
+ tupleEntries2.clear();
+ }
+
+ for (int i = 0; i < getTupleCount(); ++i) {
+ frameTuple.resetByTupleIndex(this, i);
+ double key = keyValueProviders[splitAxis + sortOrder].getValue(
+ frameTuple.getFieldData(splitAxis + sortOrder), frameTuple.getFieldStart(splitAxis + sortOrder));
+ tupleEntries1.add(i, key);
+ }
+ double key = keyValueProviders[splitAxis + sortOrder].getValue(tuple.getFieldData(splitAxis + sortOrder),
+ tuple.getFieldStart(splitAxis + sortOrder));
+ tupleEntries1.add(-1, key);
+ tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
+
+ double minArea = Double.MAX_VALUE;
+ double minOverlap = Double.MAX_VALUE;
+ int splitPoint = 0;
+ for (int i = 1; i <= splitDistribution; ++i) {
+ int d = m - 1 + i;
+
+ generateDist(tuple, tupleEntries1, rec[0], 0, d);
+ generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
+
+ double overlap = rec[0].overlappedArea(rec[2]);
+ if (overlap < minOverlap) {
+ splitPoint = d;
+ minOverlap = overlap;
+ minArea = rec[0].area() + rec[2].area();
+ } else if (overlap == minOverlap) {
+ double area = rec[0].area() + rec[2].area();
+ if (area < minArea) {
+ splitPoint = d;
+ minArea = area;
+ }
+ }
+ }
+ int startIndex, endIndex;
+ if (splitPoint < (getTupleCount() + 1) / 2) {
+ startIndex = 0;
+ endIndex = splitPoint;
+ } else {
+ startIndex = splitPoint;
+ endIndex = (getTupleCount() + 1);
+ }
+ boolean tupleInserted = false;
+ int totalBytes = 0, numOfDeletedTuples = 0;
+ for (int i = startIndex; i < endIndex; i++) {
+ if (tupleEntries1.get(i).getTupleIndex() != -1) {
+ frameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
+ rightFrame.insert(frameTuple, -1);
+ ((UnorderedSlotManager) slotManager).modifySlot(
+ slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
+ totalBytes += getTupleSize(frameTuple);
+ numOfDeletedTuples++;
+ } else {
+ rightFrame.insert(tuple, -1);
+ tupleInserted = true;
+ }
+ }
+
+ ((UnorderedSlotManager) slotManager).deleteEmptySlots();
+
+ // maintain space information
+ buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
+ + (slotManager.getSlotSize() * numOfDeletedTuples));
+
+ // compact both pages
+ rightFrame.compact();
+ compact();
+
+ if (!tupleInserted) {
+ insert(tuple, -1);
+ }
+
+ int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
+ frameTuple.resetByTupleOffset(buf, tupleOff);
+ int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, keyValueProviders.length);
+
+ splitKey.initData(splitKeySize);
+ this.adjustMBR();
+ rTreeTupleWriterLeftFrame.writeTupleFields(getTuples(), 0, rTreeSplitKey.getLeftPageBuffer(), 0);
+ rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
+
+ ((IRTreeFrame) rightFrame).adjustMBR();
+ rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
+ rTreeSplitKey.getRightPageBuffer(), 0);
+ rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
+
+ tupleEntries1.clear();
+ tupleEntries2.clear();
+ }
+
+ abstract public int getTupleSize(ITupleReference tuple);
+
public void generateDist(ITupleReference tuple, TupleEntryArrayList entries, Rectangle rec, int start, int end) {
int j = 0;
while (entries.get(j).getTupleIndex() == -1) {
@@ -157,20 +315,17 @@
}
@Override
- public void computeMBR(ISplitKey splitKey) {
- RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
- RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
+ public void adjustMBR() {
+ for (int i = 0; i < tuples.length; i++) {
+ tuples[i].setFieldCount(getFieldCount());
+ tuples[i].resetByTupleIndex(this, 0);
+ }
- int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByTupleOffset(buf, tupleOff);
- int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, keyValueProviders.length);
-
- splitKey.initData(splitKeySize);
- this.adjustMBR(tuples);
- rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
- rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
+ adjustMBRImpl(tuples);
}
+ public abstract int getFieldCount();
+
@Override
public int getPageHeaderSize() {
return rightPageOff;
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
index 99a51a4..63387ef 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMInteriorFrame.java
@@ -24,26 +24,20 @@
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.common.frames.FrameOpSpaceStatus;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.SlotOffTupleOff;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
import edu.uci.ics.hyracks.storage.am.rtree.impls.EntriesOrder;
import edu.uci.ics.hyracks.storage.am.rtree.impls.PathList;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
public class RTreeNSMInteriorFrame extends RTreeNSMFrame implements IRTreeInteriorFrame {
private static final int childPtrSize = 4;
- private static IBinaryComparator childPtrCmp = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY)
+ private IBinaryComparator childPtrCmp = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY)
.createBinaryComparator();
private final int keyFieldCount;
@@ -260,10 +254,7 @@
@Override
public FrameOpSpaceStatus hasSpaceInsert(ITupleReference tuple) {
- int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize; // for
- // the
- // child
- // pointer
+ int bytesRequired = tupleWriter.bytesRequired(tuple) + childPtrSize;
if (bytesRequired + slotManager.getSlotSize() <= buf.capacity() - buf.getInt(freeSpaceOff)
- (buf.getInt(tupleCountOff) * slotManager.getSlotSize()))
return FrameOpSpaceStatus.SUFFICIENT_CONTIGUOUS_SPACE;
@@ -274,14 +265,18 @@
}
@Override
- public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) {
+ public void adjustKey(ITupleReference tuple, int tupleIndex, MultiComparator cmp) throws TreeIndexException {
frameTuple.setFieldCount(cmp.getKeyFieldCount());
if (tupleIndex == -1) {
tupleIndex = findTupleByPointer(tuple, cmp);
}
if (tupleIndex != -1) {
tupleWriter.writeTuple(tuple, buf.array(), getTupleOffset(tupleIndex));
+ } else {
+ throw new TreeIndexException("Error: Failed to find a tuple in a page");
+
}
+
}
private int pointerCmp(ITupleReference tupleA, ITupleReference tupleB, MultiComparator cmp) {
@@ -290,157 +285,8 @@
tupleB.getFieldData(cmp.getKeyFieldCount() - 1), getChildPointerOff(tupleB), childPtrSize);
}
- @Override
- public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException {
- RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
- RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
- RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
-
- // calculations are based on the R*-tree paper
- int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
- int splitDistribution = getTupleCount() - (2 * m) + 2;
-
- // to calculate the minimum margin in order to pick the split axis
- double minMargin = Double.MAX_VALUE;
- int splitAxis = 0, sortOrder = 0;
-
- int maxFieldPos = keyValueProviders.length / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- for (int k = 0; k < getTupleCount(); ++k) {
-
- frameTuple.resetByTupleIndex(this, k);
- double LowerKey = keyValueProviders[i]
- .getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
- double UpperKey = keyValueProviders[j]
- .getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
-
- tupleEntries1.add(k, LowerKey);
- tupleEntries2.add(k, UpperKey);
- }
- double LowerKey = keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
- double UpperKey = keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
-
- tupleEntries1.add(-1, LowerKey);
- tupleEntries2.add(-1, UpperKey);
-
- tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
- tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
-
- double lowerMargin = 0.0, upperMargin = 0.0;
- // generate distribution
- for (int k = 1; k <= splitDistribution; ++k) {
- int d = m - 1 + k;
-
- generateDist(tuple, tupleEntries1, rec[0], 0, d);
- generateDist(tuple, tupleEntries2, rec[1], 0, d);
- generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
- generateDist(tuple, tupleEntries2, rec[3], d, getTupleCount() + 1);
-
- // calculate the margin of the distributions
- lowerMargin += rec[0].margin() + rec[2].margin();
- upperMargin += rec[1].margin() + rec[3].margin();
- }
- double margin = Math.min(lowerMargin, upperMargin);
-
- // store minimum margin as split axis
- if (margin < minMargin) {
- minMargin = margin;
- splitAxis = i;
- sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
- }
-
- tupleEntries1.clear();
- tupleEntries2.clear();
- }
-
- for (int i = 0; i < getTupleCount(); ++i) {
- frameTuple.resetByTupleIndex(this, i);
- double key = keyValueProviders[splitAxis + sortOrder].getValue(
- frameTuple.getFieldData(splitAxis + sortOrder), frameTuple.getFieldStart(splitAxis + sortOrder));
- tupleEntries1.add(i, key);
- }
- double key = keyValueProviders[splitAxis + sortOrder].getValue(tuple.getFieldData(splitAxis + sortOrder),
- tuple.getFieldStart(splitAxis + sortOrder));
- tupleEntries1.add(-1, key);
- tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
-
- double minArea = Double.MAX_VALUE;
- double minOverlap = Double.MAX_VALUE;
- int splitPoint = 0;
- for (int i = 1; i <= splitDistribution; ++i) {
- int d = m - 1 + i;
-
- generateDist(tuple, tupleEntries1, rec[0], 0, d);
- generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
-
- double overlap = rec[0].overlappedArea(rec[2]);
- if (overlap < minOverlap) {
- splitPoint = d;
- minOverlap = overlap;
- minArea = rec[0].area() + rec[2].area();
- } else if (overlap == minOverlap) {
- double area = rec[0].area() + rec[2].area();
- if (area < minArea) {
- splitPoint = d;
- minArea = area;
- }
- }
- }
- int startIndex, endIndex;
- if (splitPoint < (getTupleCount() + 1) / 2) {
- startIndex = 0;
- endIndex = splitPoint;
- } else {
- startIndex = splitPoint;
- endIndex = (getTupleCount() + 1);
- }
- boolean tupleInserted = false;
- int totalBytes = 0, numOfDeletedTuples = 0;
- for (int i = startIndex; i < endIndex; i++) {
- if (tupleEntries1.get(i).getTupleIndex() != -1) {
- frameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
- rightFrame.insert(frameTuple, -1);
- ((UnorderedSlotManager) slotManager).modifySlot(
- slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
- totalBytes += tupleWriter.bytesRequired(frameTuple) + childPtrSize;
- numOfDeletedTuples++;
- } else {
- rightFrame.insert(tuple, -1);
- tupleInserted = true;
- }
- }
-
- ((UnorderedSlotManager) slotManager).deleteEmptySlots();
-
- // maintain space information
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
- + (slotManager.getSlotSize() * numOfDeletedTuples));
-
- // compact both pages
- rightFrame.compact();
- compact();
-
- if (!tupleInserted) {
- insert(tuple, -1);
- }
-
- int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByTupleOffset(buf, tupleOff);
- int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, keyValueProviders.length);
-
- splitKey.initData(splitKeySize);
- this.adjustMBR(tuples);
- rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
- rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
-
- ((IRTreeFrame) rightFrame).adjustMBR(((RTreeNSMFrame) rightFrame).getTuples());
- rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
- rTreeSplitKey.getRightPageBuffer(), 0);
- rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
-
- tupleEntries1.clear();
- tupleEntries2.clear();
+ public int getTupleSize(ITupleReference tuple) {
+ return tupleWriter.bytesRequired(tuple) + childPtrSize;
}
private int getChildPointerOff(ITupleReference tuple) {
@@ -642,16 +488,6 @@
}
}
- @Override
- public void adjustMBR(ITreeIndexTupleReference[] tuples) {
- for (int i = 0; i < tuples.length; i++) {
- tuples[i].setFieldCount(keyValueProviders.length);
- tuples[i].resetByTupleIndex(this, 0);
- }
-
- adjustMBRImpl(tuples);
- }
-
// For debugging.
public ArrayList<Integer> getChildren(MultiComparator cmp) {
ArrayList<Integer> ret = new ArrayList<Integer>();
@@ -668,4 +504,9 @@
}
return ret;
}
+
+ @Override
+ public int getFieldCount() {
+ return keyValueProviders.length;
+ }
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
index 858b1d5..f1d71ff 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/frames/RTreeNSMLeafFrame.java
@@ -17,18 +17,10 @@
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProvider;
-import edu.uci.ics.hyracks.storage.am.common.api.ISplitKey;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrame;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
-import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
-import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeFrame;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.EntriesOrder;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.RTreeSplitKey;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.UnorderedSlotManager;
-import edu.uci.ics.hyracks.storage.am.rtree.tuples.RTreeTypeAwareTupleWriter;
public class RTreeNSMLeafFrame extends RTreeNSMFrame implements IRTreeLeafFrame {
@@ -68,159 +60,8 @@
return true;
}
- @Override
- public void split(ITreeIndexFrame rightFrame, ITupleReference tuple, ISplitKey splitKey) throws TreeIndexException {
-
- RTreeSplitKey rTreeSplitKey = ((RTreeSplitKey) splitKey);
- RTreeTypeAwareTupleWriter rTreeTupleWriterLeftFrame = ((RTreeTypeAwareTupleWriter) tupleWriter);
- RTreeTypeAwareTupleWriter rTreeTupleWriterRightFrame = ((RTreeTypeAwareTupleWriter) rightFrame.getTupleWriter());
-
- // calculations are based on the R*-tree paper
- int m = (int) Math.floor((getTupleCount() + 1) * splitFactor);
- int splitDistribution = getTupleCount() - (2 * m) + 2;
-
- // to calculate the minimum margin in order to pick the split axis
- double minMargin = Double.MAX_VALUE;
- int splitAxis = 0, sortOrder = 0;
-
- int maxFieldPos = keyValueProviders.length / 2;
- for (int i = 0; i < maxFieldPos; i++) {
- int j = maxFieldPos + i;
- for (int k = 0; k < getTupleCount(); ++k) {
-
- frameTuple.resetByTupleIndex(this, k);
-
- double LowerKey = keyValueProviders[i]
- .getValue(frameTuple.getFieldData(i), frameTuple.getFieldStart(i));
- double UpperKey = keyValueProviders[j]
- .getValue(frameTuple.getFieldData(j), frameTuple.getFieldStart(j));
-
- tupleEntries1.add(k, LowerKey);
- tupleEntries2.add(k, UpperKey);
- }
- double LowerKey = keyValueProviders[i].getValue(tuple.getFieldData(i), tuple.getFieldStart(i));
- double UpperKey = keyValueProviders[j].getValue(tuple.getFieldData(j), tuple.getFieldStart(j));
-
- tupleEntries1.add(-1, LowerKey);
- tupleEntries2.add(-1, UpperKey);
-
- tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
- tupleEntries2.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
-
- double lowerMargin = 0.0, upperMargin = 0.0;
- // generate distribution
- for (int k = 1; k <= splitDistribution; ++k) {
- int d = m - 1 + k;
-
- generateDist(tuple, tupleEntries1, rec[0], 0, d);
- generateDist(tuple, tupleEntries2, rec[1], 0, d);
- generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
- generateDist(tuple, tupleEntries2, rec[3], d, getTupleCount() + 1);
-
- // calculate the margin of the distributions
- lowerMargin += rec[0].margin() + rec[2].margin();
- upperMargin += rec[1].margin() + rec[3].margin();
- }
- double margin = Math.min(lowerMargin, upperMargin);
-
- // store minimum margin as split axis
- if (margin < minMargin) {
- minMargin = margin;
- splitAxis = i;
- sortOrder = (lowerMargin < upperMargin) ? 0 : 2;
- }
-
- tupleEntries1.clear();
- tupleEntries2.clear();
- }
-
- for (int i = 0; i < getTupleCount(); ++i) {
- frameTuple.resetByTupleIndex(this, i);
- double key = keyValueProviders[splitAxis + sortOrder].getValue(
- frameTuple.getFieldData(splitAxis + sortOrder), frameTuple.getFieldStart(splitAxis + sortOrder));
- tupleEntries1.add(i, key);
- }
- double key = keyValueProviders[splitAxis + sortOrder].getValue(tuple.getFieldData(splitAxis + sortOrder),
- tuple.getFieldStart(splitAxis + sortOrder));
- tupleEntries1.add(-1, key);
- tupleEntries1.sort(EntriesOrder.ASCENDING, getTupleCount() + 1);
-
- double minArea = Double.MAX_VALUE;
- double minOverlap = Double.MAX_VALUE;
- int splitPoint = 0;
- for (int i = 1; i <= splitDistribution; ++i) {
- int d = m - 1 + i;
-
- generateDist(tuple, tupleEntries1, rec[0], 0, d);
- generateDist(tuple, tupleEntries1, rec[2], d, getTupleCount() + 1);
-
- double overlap = rec[0].overlappedArea(rec[2]);
- if (overlap < minOverlap) {
- splitPoint = d;
- minOverlap = overlap;
- minArea = rec[0].area() + rec[2].area();
- } else if (overlap == minOverlap) {
- double area = rec[0].area() + rec[2].area();
- if (area < minArea) {
- splitPoint = d;
- minArea = area;
- }
- }
- }
- int startIndex, endIndex;
- if (splitPoint < (getTupleCount() + 1) / 2) {
- startIndex = 0;
- endIndex = splitPoint;
- } else {
- startIndex = splitPoint;
- endIndex = (getTupleCount() + 1);
- }
- boolean tupleInserted = false;
- int totalBytes = 0, numOfDeletedTuples = 0;
- for (int i = startIndex; i < endIndex; i++) {
- if (tupleEntries1.get(i).getTupleIndex() != -1) {
- frameTuple.resetByTupleIndex(this, tupleEntries1.get(i).getTupleIndex());
- rightFrame.insert(frameTuple, -1);
- ((UnorderedSlotManager) slotManager).modifySlot(
- slotManager.getSlotOff(tupleEntries1.get(i).getTupleIndex()), -1);
- totalBytes += tupleWriter.bytesRequired(frameTuple);
- numOfDeletedTuples++;
- } else {
- rightFrame.insert(tuple, -1);
- tupleInserted = true;
- }
- }
-
- ((UnorderedSlotManager) slotManager).deleteEmptySlots();
-
- // maintain space information
- buf.putInt(totalFreeSpaceOff, buf.getInt(totalFreeSpaceOff) + totalBytes
- + (slotManager.getSlotSize() * numOfDeletedTuples));
-
- // compact both pages
- rightFrame.compact();
- compact();
-
- if (!tupleInserted) {
- insert(tuple, -1);
- }
-
- int tupleOff = slotManager.getTupleOff(slotManager.getSlotEndOff());
- frameTuple.resetByTupleOffset(buf, tupleOff);
- int splitKeySize = tupleWriter.bytesRequired(frameTuple, 0, keyValueProviders.length);
-
- splitKey.initData(splitKeySize);
- this.adjustMBR(tuples);
- rTreeTupleWriterLeftFrame.writeTupleFields(tuples, 0, rTreeSplitKey.getLeftPageBuffer(), 0);
- rTreeSplitKey.getLeftTuple().resetByTupleOffset(rTreeSplitKey.getLeftPageBuffer(), 0);
-
- ((IRTreeFrame) rightFrame).adjustMBR(((RTreeNSMFrame) rightFrame).getTuples());
- rTreeTupleWriterRightFrame.writeTupleFields(((RTreeNSMFrame) rightFrame).getTuples(), 0,
- rTreeSplitKey.getRightPageBuffer(), 0);
- rTreeSplitKey.getRightTuple().resetByTupleOffset(rTreeSplitKey.getRightPageBuffer(), 0);
-
- tupleEntries1.clear();
- tupleEntries2.clear();
+ public int getTupleSize(ITupleReference tuple) {
+ return tupleWriter.bytesRequired(tuple);
}
@Override
@@ -252,10 +93,7 @@
}
@Override
- public void adjustMBR(ITreeIndexTupleReference[] tuples) {
- for (int i = 0; i < tuples.length; i++) {
- tuples[i].resetByTupleIndex(this, 0);
- }
- adjustMBRImpl(tuples);
+ public int getFieldCount() {
+ return frameTuple.getFieldCount();
}
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
index 4f86111..911029d 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/PathList.java
@@ -19,93 +19,99 @@
import edu.uci.ics.hyracks.storage.am.common.ophelpers.LongArrayList;
public class PathList {
- private IntArrayList pageIds;
- private LongArrayList pageLsns;
- private IntArrayList pageIndexes;
+ private IntArrayList pageIds;
+ private LongArrayList pageLsns;
+ private IntArrayList pageIndexes;
- public PathList(int initialCapacity, int growth) {
- pageIds = new IntArrayList(initialCapacity, growth);
- pageLsns = new LongArrayList(initialCapacity, growth);
- pageIndexes = new IntArrayList(initialCapacity, growth);
- }
+ public PathList(int initialCapacity, int growth) {
+ pageIds = new IntArrayList(initialCapacity, growth);
+ pageLsns = new LongArrayList(initialCapacity, growth);
+ pageIndexes = new IntArrayList(initialCapacity, growth);
+ }
- public int size() {
- return pageIds.size();
- }
+ public int size() {
+ return pageIds.size();
+ }
- public int first() {
- return pageIds.first();
- }
+ public int first() {
+ return pageIds.first();
+ }
- public void add(int pageId, long pageLsn, int pageIndex) {
- pageIds.add(pageId);
- pageLsns.add(pageLsn);
- pageIndexes.add(pageIndex);
- }
+ public void add(int pageId, long pageLsn, int pageIndex) {
+ pageIds.add(pageId);
+ pageLsns.add(pageLsn);
+ pageIndexes.add(pageIndex);
+ }
- public int getFirstPageId() {
- return pageIds.getFirst();
- }
+ public void addFirst(int pageId, long pageLsn, int pageIndex) {
+ pageIds.addFirst(pageId);
+ pageLsns.addFirst(pageLsn);
+ pageIndexes.addFirst(pageIndex);
+ }
- public long getFirstPageLsn() {
- return pageLsns.getFirst();
- }
+ public int getFirstPageId() {
+ return pageIds.getFirst();
+ }
- public int getFirstPageIndex() {
- return pageIndexes.getFirst();
- }
+ public long getFirstPageLsn() {
+ return pageLsns.getFirst();
+ }
- public int getLastPageId() {
- return pageIds.getLast();
- }
+ public int getFirstPageIndex() {
+ return pageIndexes.getFirst();
+ }
- public long getLastPageLsn() {
- return pageLsns.getLast();
- }
+ public int getLastPageId() {
+ return pageIds.getLast();
+ }
- public int getLastPageIndex() {
- return pageIndexes.getLast();
- }
+ public long getLastPageLsn() {
+ return pageLsns.getLast();
+ }
- public int getPageId(int i) {
- return pageIds.get(i);
- }
+ public int getLastPageIndex() {
+ return pageIndexes.getLast();
+ }
- public long getPageLsn(int i) {
- return pageLsns.get(i);
- }
+ public int getPageId(int i) {
+ return pageIds.get(i);
+ }
- public int getPageIndex(int i) {
- return pageIndexes.get(i);
- }
+ public long getPageLsn(int i) {
+ return pageLsns.get(i);
+ }
- public void setPageLsn(int i, long pageLsn) {
- pageLsns.set(i, pageLsn);
- }
+ public int getPageIndex(int i) {
+ return pageIndexes.get(i);
+ }
- public void moveFirst() {
- pageIds.moveFirst();
- pageLsns.moveFirst();
- pageIndexes.moveFirst();
- }
+ public void setPageLsn(int i, long pageLsn) {
+ pageLsns.set(i, pageLsn);
+ }
- public void moveLast() {
- pageIds.removeLast();
- pageLsns.removeLast();
- pageIndexes.removeLast();
- }
+ public void moveFirst() {
+ pageIds.moveFirst();
+ pageLsns.moveFirst();
+ pageIndexes.moveFirst();
+ }
- public boolean isLast() {
- return pageIds.isLast();
- }
+ public void moveLast() {
+ pageIds.removeLast();
+ pageLsns.removeLast();
+ pageIndexes.removeLast();
+ }
- public void clear() {
- pageIds.clear();
- pageLsns.clear();
- pageIndexes.clear();
- }
+ public boolean isLast() {
+ return pageIds.isLast();
+ }
- public boolean isEmpty() {
- return pageIds.isEmpty();
- }
+ public void clear() {
+ pageIds.clear();
+ pageLsns.clear();
+ pageIndexes.clear();
+ }
+
+ public boolean isEmpty() {
+ return pageIds.isEmpty();
+ }
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
index bda4292..b826544 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
@@ -79,12 +79,8 @@
this.treeLatch = new ReentrantReadWriteLock(true);
}
- public void incrementGlobalNsn() {
- globalNsn.incrementAndGet();
- }
-
- public long getGlobalNsn() {
- return globalNsn.get();
+ private long incrementGlobalNsn() {
+ return globalNsn.incrementAndGet();
}
public byte getTreeHeight(IRTreeLeafFrame leafFrame) throws HyracksDataException {
@@ -129,14 +125,24 @@
}
String keyString;
+ long LSN, NSN;
+ int rightPage;
if (interiorFrame.isLeaf()) {
leafFrame.setPage(node);
keyString = TreeIndexUtils.printFrameTuples(leafFrame, keySerdes);
+ LSN = leafFrame.getPageLsn();
+ NSN = leafFrame.getPageNsn();
+ rightPage = leafFrame.getRightPage();
+
} else {
keyString = TreeIndexUtils.printFrameTuples(interiorFrame, keySerdes);
+ LSN = interiorFrame.getPageLsn();
+ NSN = interiorFrame.getPageNsn();
+ rightPage = interiorFrame.getRightPage();
}
- strBuilder.append(keyString + "\n");
+ strBuilder.append(keyString + "\n" + "pageId: " + pageId + " LSN: " + LSN + " NSN: " + NSN + " rightPage: "
+ + rightPage + "\n");
if (!interiorFrame.isLeaf()) {
ArrayList<Integer> children = ((RTreeNSMInteriorFrame) (interiorFrame)).getChildren(cmp);
for (int i = 0; i < children.size(); i++) {
@@ -216,23 +222,35 @@
throw new IllegalArgumentException("The low key point has larger coordinates than the high key point.");
}
}
+ try {
+ ICachedPage leafNode = findLeaf(ctx);
- ICachedPage leafNode = findLeaf(ctx);
+ int pageId = ctx.pathList.getLastPageId();
+ ctx.pathList.moveLast();
+ insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
- int pageId = ctx.pathList.getLastPageId();
- ctx.pathList.moveLast();
- insertTuple(leafNode, pageId, ctx.getTuple(), ctx, true);
+ while (true) {
+ if (ctx.splitKey.getLeftPageBuffer() != null) {
+ updateParentForInsert(ctx);
+ } else {
+ break;
+ }
+ }
+ } finally {
+ for (int i = ctx.NSNUpdates.size() - 1; i >= 0; i--) {
+ ICachedPage node = ctx.NSNUpdates.get(i);
+ ctx.interiorFrame.setPage(node);
+ ctx.interiorFrame.setPageNsn(incrementGlobalNsn());
+ }
- while (true) {
- if (ctx.splitKey.getLeftPageBuffer() != null) {
- updateParentForInsert(ctx);
- } else {
- break;
+ for (int i = ctx.LSNUpdates.size() - 1; i >= 0; i--) {
+ ICachedPage node = ctx.LSNUpdates.get(i);
+ ctx.interiorFrame.setPage(node);
+ ctx.interiorFrame.setPageLsn(incrementGlobalNsn());
+ node.releaseWriteLatch();
+ bufferCache.unpin(node);
}
}
-
- leafNode.releaseWriteLatch();
- bufferCache.unpin(leafNode);
}
private ICachedPage findLeaf(RTreeOpContext ctx) throws HyracksDataException {
@@ -298,11 +316,13 @@
if (!isLeaf) {
// findBestChild must be called *before* getBestChildPageId
- ctx.interiorFrame.findBestChild(ctx.getTuple(), ctx.cmp);
+ boolean enlarementIsNeeded = ctx.interiorFrame.findBestChild(ctx.getTuple(), ctx.cmp);
int childPageId = ctx.interiorFrame.getBestChildPageId();
// check if enlargement is needed
- boolean enlarementIsNeeded = ctx.interiorFrame.checkEnlargement(ctx.getTuple(), ctx.cmp);
+ // boolean enlarementIsNeeded =
+ // ctx.interiorFrame.checkEnlargement(ctx.getTuple(),
+ // ctx.cmp);
if (enlarementIsNeeded) {
if (!writeLatched) {
node.releaseReadLatch();
@@ -379,13 +399,10 @@
case SUFFICIENT_CONTIGUOUS_SPACE: {
if (!isLeaf) {
ctx.interiorFrame.insert(tuple, -1);
- incrementGlobalNsn();
- ctx.interiorFrame.setPageLsn(getGlobalNsn());
} else {
ctx.leafFrame.insert(tuple, -1);
- incrementGlobalNsn();
- ctx.leafFrame.setPageLsn(getGlobalNsn());
}
+ ctx.LSNUpdates.add(node);
ctx.splitKey.reset();
break;
}
@@ -394,14 +411,11 @@
if (!isLeaf) {
ctx.interiorFrame.compact();
ctx.interiorFrame.insert(tuple, -1);
- incrementGlobalNsn();
- ctx.interiorFrame.setPageLsn(getGlobalNsn());
} else {
ctx.leafFrame.compact();
ctx.leafFrame.insert(tuple, -1);
- incrementGlobalNsn();
- ctx.leafFrame.setPageLsn(getGlobalNsn());
}
+ ctx.LSNUpdates.add(node);
ctx.splitKey.reset();
break;
}
@@ -411,67 +425,59 @@
ICachedPage rightNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, rightPageId), true);
rightNode.acquireWriteLatch();
- try {
- IRTreeFrame rightFrame;
- if (!isLeaf) {
- rightFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
- rightFrame.setPage(rightNode);
- rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
- ctx.interiorFrame.split(rightFrame, tuple, ctx.splitKey);
- ctx.interiorFrame.setRightPage(rightPageId);
- rightFrame.setPageNsn(ctx.interiorFrame.getPageNsn());
- incrementGlobalNsn();
- long newNsn = getGlobalNsn();
- rightFrame.setPageLsn(newNsn);
- ctx.interiorFrame.setPageNsn(newNsn);
- ctx.interiorFrame.setPageLsn(newNsn);
- } else {
- rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
- rightFrame.setPage(rightNode);
- rightFrame.initBuffer((byte) 0);
- ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey);
- ctx.leafFrame.setRightPage(rightPageId);
- rightFrame.setPageNsn(ctx.leafFrame.getPageNsn());
- incrementGlobalNsn();
- long newNsn = getGlobalNsn();
- rightFrame.setPageLsn(newNsn);
- ctx.leafFrame.setPageNsn(newNsn);
- ctx.leafFrame.setPageLsn(newNsn);
- }
- ctx.splitKey.setPages(pageId, rightPageId);
- if (pageId == rootPage) {
- int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
- ICachedPage newLeftNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId),
- true);
- newLeftNode.acquireWriteLatch();
- try {
- // copy left child to new left child
- System.arraycopy(node.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0,
- newLeftNode.getBuffer().capacity());
+ IRTreeFrame rightFrame;
+ if (!isLeaf) {
+ rightFrame = (IRTreeFrame) interiorFrameFactory.createFrame();
+ rightFrame.setPage(rightNode);
+ rightFrame.initBuffer((byte) ctx.interiorFrame.getLevel());
+ rightFrame.setRightPage(ctx.interiorFrame.getRightPage());
+ ctx.interiorFrame.split(rightFrame, tuple, ctx.splitKey);
+ ctx.interiorFrame.setRightPage(rightPageId);
+ ctx.NSNUpdates.add(rightNode);
+ ctx.LSNUpdates.add(rightNode);
+ ctx.NSNUpdates.add(node);
+ ctx.LSNUpdates.add(node);
+ } else {
+ rightFrame = (IRTreeFrame) leafFrameFactory.createFrame();
+ rightFrame.setPage(rightNode);
+ rightFrame.initBuffer((byte) 0);
+ rightFrame.setRightPage(ctx.interiorFrame.getRightPage());
+ ctx.leafFrame.split(rightFrame, tuple, ctx.splitKey);
+ ctx.leafFrame.setRightPage(rightPageId);
+ ctx.NSNUpdates.add(rightNode);
+ ctx.LSNUpdates.add(rightNode);
+ ctx.NSNUpdates.add(node);
+ ctx.LSNUpdates.add(node);
+ }
+ ctx.splitKey.setPages(pageId, rightPageId);
+ if (pageId == rootPage) {
+ int newLeftId = freePageManager.getFreePage(ctx.metaFrame);
+ ICachedPage newLeftNode = bufferCache
+ .pin(BufferedFileHandle.getDiskPageId(fileId, newLeftId), true);
+ newLeftNode.acquireWriteLatch();
- // initialize new root (leftNode becomes new root)
- ctx.interiorFrame.setPage(node);
- ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame.getLevel() + 1));
+ // copy left child to new left child
+ System.arraycopy(node.getBuffer().array(), 0, newLeftNode.getBuffer().array(), 0, newLeftNode
+ .getBuffer().capacity());
- ctx.splitKey.setLeftPage(newLeftId);
+ // initialize new root (leftNode becomes new root)
+ ctx.interiorFrame.setPage(node);
+ ctx.interiorFrame.initBuffer((byte) (ctx.interiorFrame.getLevel() + 1));
- ctx.interiorFrame.insert(ctx.splitKey.getLeftTuple(), -1);
- ctx.interiorFrame.insert(ctx.splitKey.getRightTuple(), -1);
+ ctx.splitKey.setLeftPage(newLeftId);
+ ctx.interiorFrame.insert(ctx.splitKey.getLeftTuple(), -1);
+ ctx.interiorFrame.insert(ctx.splitKey.getRightTuple(), -1);
- incrementGlobalNsn();
- long newNsn = getGlobalNsn();
- ctx.interiorFrame.setPageLsn(newNsn);
- ctx.interiorFrame.setPageNsn(newNsn);
- } finally {
- newLeftNode.releaseWriteLatch();
- bufferCache.unpin(newLeftNode);
- }
+ ctx.NSNUpdates.remove(ctx.NSNUpdates.size() - 1);
+ ctx.LSNUpdates.remove(ctx.LSNUpdates.size() - 1);
- ctx.splitKey.reset();
- }
- } finally {
- rightNode.releaseWriteLatch();
- bufferCache.unpin(rightNode);
+ ctx.NSNUpdates.add(newLeftNode);
+ ctx.LSNUpdates.add(newLeftNode);
+
+ ctx.NSNUpdates.add(node);
+ ctx.LSNUpdates.add(node);
+
+ ctx.splitKey.reset();
}
break;
}
@@ -487,61 +493,54 @@
ctx.interiorFrame.setPage(parentNode);
boolean foundParent = true;
- try {
+ if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
+ foundParent = false;
+ while (true) {
+ if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.cmp) != -1) {
+ // found the parent
+ foundParent = true;
+ break;
+ }
+ int rightPage = ctx.interiorFrame.getRightPage();
+ parentNode.releaseWriteLatch();
+ writeLatched = false;
+ bufferCache.unpin(parentNode);
- if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
- foundParent = false;
- while (true) {
- if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.cmp) != -1) {
- // found the parent
- foundParent = true;
- break;
- }
- int rightPage = ctx.interiorFrame.getRightPage();
+ if (rightPage == -1) {
+ break;
+ }
+
+ parentId = rightPage;
+ parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
+ parentNode.acquireWriteLatch();
+ writeLatched = true;
+ ctx.interiorFrame.setPage(parentNode);
+ }
+ }
+
+ if (foundParent) {
+ try {
+ ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, ctx.cmp);
+ } catch (TreeIndexException e) {
+ if (writeLatched) {
parentNode.releaseWriteLatch();
writeLatched = false;
bufferCache.unpin(parentNode);
-
- if (rightPage == -1) {
- break;
- }
-
- parentId = rightPage;
- parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
- parentNode.acquireWriteLatch();
- writeLatched = true;
- ctx.interiorFrame.setPage(parentNode);
}
+ throw e;
}
- if (foundParent) {
- ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), -1, ctx.cmp);
- insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(), ctx, ctx.interiorFrame.isLeaf());
- ctx.pathList.moveLast();
+ insertTuple(parentNode, parentId, ctx.splitKey.getRightTuple(), ctx, ctx.interiorFrame.isLeaf());
+ ctx.pathList.moveLast();
+ return;
- parentNode.releaseWriteLatch();
- writeLatched = false;
- bufferCache.unpin(parentNode);
- return;
- }
-
- } finally {
- if (writeLatched) {
- parentNode.releaseWriteLatch();
- writeLatched = false;
- bufferCache.unpin(parentNode);
- }
}
- // very rare situation when the there is a root split, do an
- // exhaustive
- // breadth-first traversal looking for the parent tuple
- ctx.pathList.clear();
ctx.traverseList.clear();
findPath(ctx);
updateParentForInsert(ctx);
}
- private void findPath(RTreeOpContext ctx) throws HyracksDataException {
+ private void findPath(RTreeOpContext ctx) throws TreeIndexException, HyracksDataException {
boolean readLatched = false;
int pageId = rootPage;
int parentIndex = -1;
@@ -565,16 +564,23 @@
ctx.traverseList.moveFirst();
+ if (ctx.interiorFrame.isLeaf()) {
+ throw new TreeIndexException("Error: Failed to re-find parent of a page in the tree.");
+ }
+
+ if (pageId != rootPage) {
+ parentLsn = ctx.traverseList.getPageLsn(ctx.traverseList.getPageIndex(pageIndex));
+ }
if (pageId != rootPage && parentLsn < ctx.interiorFrame.getPageNsn()) {
int rightPage = ctx.interiorFrame.getRightPage();
if (rightPage != -1) {
- ctx.traverseList.add(rightPage, -1, parentIndex);
+ ctx.traverseList.addFirst(rightPage, -1, parentIndex);
}
}
- parentLsn = pageLsn;
if (ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.traverseList, pageIndex,
ctx.cmp) != -1) {
+ ctx.pathList.clear();
fillPath(ctx, pageIndex);
return;
}
@@ -611,100 +617,11 @@
ctx.pathList.moveLast();
deleteTuple(pageId, tupleIndex, ctx);
- while (true) {
- if (ctx.splitKey.getLeftPageBuffer() != null) {
- updateParentForDelete(ctx);
- } else {
- break;
- }
- }
-
ctx.leafFrame.getPage().releaseWriteLatch();
bufferCache.unpin(ctx.leafFrame.getPage());
}
}
- private void updateParentForDelete(RTreeOpContext ctx) throws HyracksDataException {
- boolean writeLatched = false;
- int parentId = ctx.pathList.getLastPageId();
- ICachedPage parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
- parentNode.acquireWriteLatch();
- writeLatched = true;
- ctx.interiorFrame.setPage(parentNode);
- boolean foundParent = true;
- int tupleIndex = -1;
-
- try {
- if (ctx.interiorFrame.getPageLsn() != ctx.pathList.getLastPageLsn()) {
- foundParent = false;
- while (true) {
- tupleIndex = ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.cmp);
- if (tupleIndex != -1) {
- // found the parent
- foundParent = true;
- break;
- }
- int rightPage = ctx.interiorFrame.getRightPage();
- parentNode.releaseWriteLatch();
- writeLatched = false;
- bufferCache.unpin(parentNode);
-
- if (rightPage == -1) {
- break;
- }
-
- parentId = rightPage;
- parentNode = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, parentId), false);
- parentNode.acquireWriteLatch();
- writeLatched = true;
- ctx.interiorFrame.setPage(parentNode);
- }
- }
- if (foundParent) {
- if (tupleIndex == -1) {
- tupleIndex = ctx.interiorFrame.findTupleByPointer(ctx.splitKey.getLeftTuple(), ctx.cmp);
- }
- boolean recomputeMBR = ctx.interiorFrame.recomputeMBR(ctx.splitKey.getLeftTuple(), tupleIndex, ctx.cmp);
-
- if (recomputeMBR) {
- ctx.interiorFrame.adjustKey(ctx.splitKey.getLeftTuple(), tupleIndex, ctx.cmp);
- ctx.pathList.moveLast();
-
- incrementGlobalNsn();
- ctx.interiorFrame.setPageLsn(getGlobalNsn());
-
- ctx.splitKey.reset();
- if (!ctx.pathList.isEmpty()) {
- ctx.interiorFrame.computeMBR(ctx.splitKey);
- ctx.splitKey.setLeftPage(parentId);
- }
- } else {
- ctx.pathList.moveLast();
- ctx.splitKey.reset();
- }
-
- parentNode.releaseWriteLatch();
- writeLatched = false;
- bufferCache.unpin(parentNode);
- return;
- }
- } finally {
- if (writeLatched) {
- parentNode.releaseWriteLatch();
- writeLatched = false;
- bufferCache.unpin(parentNode);
- }
- }
-
- // very rare situation when the there is a root split, do an exhaustive
- // breadth-first traversal looking for the parent tuple
-
- ctx.pathList.clear();
- ctx.traverseList.clear();
- findPath(ctx);
- updateParentForDelete(ctx);
- }
-
private int findTupleToDelete(RTreeOpContext ctx) throws HyracksDataException {
boolean writeLatched = false;
boolean readLatched = false;
@@ -808,14 +725,7 @@
private void deleteTuple(int pageId, int tupleIndex, RTreeOpContext ctx) throws HyracksDataException {
ctx.leafFrame.delete(tupleIndex, ctx.cmp);
- incrementGlobalNsn();
- ctx.leafFrame.setPageLsn(getGlobalNsn());
-
- // if the page is empty, just leave it there for future inserts
- if (pageId != rootPage && ctx.leafFrame.getTupleCount() > 0) {
- ctx.leafFrame.computeMBR(ctx.splitKey);
- ctx.splitKey.setLeftPage(pageId);
- }
+ ctx.leafFrame.setPageLsn(incrementGlobalNsn());
}
private void search(ITreeIndexCursor cursor, ISearchPredicate searchPred, RTreeOpContext ctx)
@@ -1000,5 +910,9 @@
ctx.reset(IndexOp.DISKORDERSCAN);
rtree.diskOrderScan(cursor, ctx);
}
+
+ public RTreeOpContext getOpContext() {
+ return ctx;
+ }
}
}
\ No newline at end of file
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
index 8062a08..6683444 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTreeOpContext.java
@@ -15,6 +15,8 @@
package edu.uci.ics.hyracks.storage.am.rtree.impls;
+import java.util.ArrayList;
+
import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
import edu.uci.ics.hyracks.storage.am.common.api.IIndexOpContext;
@@ -24,6 +26,7 @@
import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeInteriorFrame;
import edu.uci.ics.hyracks.storage.am.rtree.api.IRTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
public class RTreeOpContext implements IIndexOpContext {
private static final int INITIAL_TRAVERSE_LIST_SIZE = 100;
@@ -41,6 +44,9 @@
// Used for traversing the tree.
public PathList traverseList;
+ public ArrayList<ICachedPage> NSNUpdates;
+ public ArrayList<ICachedPage> LSNUpdates;
+
public RTreeOpContext(IRTreeLeafFrame leafFrame, IRTreeInteriorFrame interiorFrame,
ITreeIndexMetaDataFrame metaFrame, IBinaryComparatorFactory[] cmpFactories, int treeHeightHint) {
this.cmp = MultiComparator.create(cmpFactories);
@@ -48,6 +54,8 @@
this.leafFrame = leafFrame;
this.metaFrame = metaFrame;
pathList = new PathList(treeHeightHint, treeHeightHint);
+ NSNUpdates = new ArrayList<ICachedPage>();
+ LSNUpdates = new ArrayList<ICachedPage>();
}
public ITupleReference getTuple() {
@@ -65,6 +73,8 @@
if (traverseList != null) {
traverseList.clear();
}
+ NSNUpdates.clear();
+ LSNUpdates.clear();
}
@Override
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
index ebfb4b4..a2dcb6d 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/UnorderedSlotManager.java
@@ -95,25 +95,29 @@
setSlot(slotOff, tupleOff);
}
+ public void deleteSlot(int slotOff) {
+ System.arraycopy(frame.getBuffer().array(), getSlotEndOff(), frame.getBuffer().array(), slotOff + slotSize,
+ slotSize);
+ }
+
public void deleteEmptySlots() {
int slotOff = getSlotStartOff();
- while (slotOff >= getSlotEndOff()) {
- if (frame.getBuffer().getInt(slotOff) == -1) {
- while (frame.getBuffer().getInt(getSlotEndOff()) == -1) {
- ((RTreeNSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
- if (getSlotEndOff() > getSlotStartOff()) {
- break;
- }
- }
- if (slotOff > getSlotEndOff()) {
- System.arraycopy(frame.getBuffer().array(), getSlotEndOff(), frame.getBuffer().array(), slotOff,
- slotSize);
- ((RTreeNSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
- } else {
+ while (frame.getTupleCount() > 0) {
+ while (frame.getBuffer().getInt(getSlotEndOff()) == -1) {
+ ((RTreeNSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
+ if (frame.getTupleCount() == 0) {
break;
}
}
+ if (frame.getTupleCount() == 0 || slotOff <= getSlotEndOff()) {
+ break;
+ }
+ if (frame.getBuffer().getInt(slotOff) == -1) {
+ modifySlot(slotOff, frame.getBuffer().getInt(getSlotEndOff()));
+ ((RTreeNSMFrame) frame).setTupleCount(frame.getTupleCount() - 1);
+ }
slotOff -= slotSize;
+
}
}
}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tests/AbstractRTreeMultiThreadTest.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tests/AbstractRTreeMultiThreadTest.java
new file mode 100644
index 0000000..02e6158
--- /dev/null
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tests/AbstractRTreeMultiThreadTest.java
@@ -0,0 +1,152 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.tests;
+
+import java.util.ArrayList;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.test.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.test.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.test.TestWorkloadConf;
+import edu.uci.ics.hyracks.storage.am.common.test.TreeIndexMultiThreadTestDriver;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+@SuppressWarnings("rawtypes")
+public abstract class AbstractRTreeMultiThreadTest {
+
+ protected final Logger LOGGER = Logger.getLogger(AbstractRTreeMultiThreadTest.class.getName());
+
+ // Machine-specific number of threads to use for testing.
+ protected final int REGULAR_NUM_THREADS = Runtime.getRuntime().availableProcessors();
+ // Excessive number of threads for testing.
+ protected final int EXCESSIVE_NUM_THREADS = Runtime.getRuntime().availableProcessors() * 4;
+ protected final int NUM_OPERATIONS = 10000;
+
+ protected ArrayList<TestWorkloadConf> workloadConfs = getTestWorkloadConf();
+
+ protected abstract void setUp() throws HyracksException;
+
+ protected abstract void tearDown() throws HyracksDataException;
+
+ protected abstract ITreeIndex createTreeIndex(ITypeTraits[] typeTraits,
+ IBinaryComparatorFactory[] rtreeCmpFactories, IBinaryComparatorFactory[] btreeCmpFactories,
+ IPrimitiveValueProviderFactory[] valueProviderFactories) throws TreeIndexException;
+
+ protected abstract int getFileId();
+
+ protected abstract ITreeIndexTestWorkerFactory getWorkerFactory();
+
+ protected abstract ArrayList<TestWorkloadConf> getTestWorkloadConf();
+
+ protected abstract String getIndexTypeName();
+
+ protected static float[] getUniformOpProbs(TestOperation[] ops) {
+ float[] opProbs = new float[ops.length];
+ for (int i = 0; i < ops.length; i++) {
+ opProbs[i] = 1.0f / (float) ops.length;
+ }
+ return opProbs;
+ }
+
+ protected void runTest(ISerializerDeserializer[] fieldSerdes,
+ IPrimitiveValueProviderFactory[] valueProviderFactories, int numKeys, int numThreads,
+ TestWorkloadConf conf, String dataMsg) throws HyracksException, InterruptedException, TreeIndexException {
+ setUp();
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ String indexTypeName = getIndexTypeName();
+ LOGGER.info(indexTypeName + " MultiThread Test:\nData: " + dataMsg + "; Threads: " + numThreads
+ + "; Workload: " + conf.toString() + ".");
+ }
+
+ ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes);
+ IBinaryComparatorFactory[] rtreeCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes, numKeys);
+ IBinaryComparatorFactory[] btreeCmpFactories = SerdeUtils.serdesToComparatorFactories(fieldSerdes,
+ fieldSerdes.length);
+
+ ITreeIndex index = createTreeIndex(typeTraits, rtreeCmpFactories, btreeCmpFactories, valueProviderFactories);
+ ITreeIndexTestWorkerFactory workerFactory = getWorkerFactory();
+
+ // 4 batches per thread.
+ int batchSize = (NUM_OPERATIONS / numThreads) / 4;
+
+ TreeIndexMultiThreadTestDriver driver = new TreeIndexMultiThreadTestDriver(index, workerFactory, fieldSerdes,
+ conf.ops, conf.opProbs);
+ driver.init(getFileId());
+ long[] times = driver.run(numThreads, 1, NUM_OPERATIONS, batchSize);
+ driver.deinit();
+
+ if (LOGGER.isLoggable(Level.INFO)) {
+ LOGGER.info("RTree MultiThread Test Time: " + times[0] + "ms");
+ }
+
+ tearDown();
+ }
+
+ @Test
+ public void twoDimensionsInt() throws InterruptedException, HyracksException, TreeIndexException {
+ ISerializerDeserializer[] fieldSerdes = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+ int numKeys = 4;
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ numKeys, IntegerPointable.FACTORY);
+
+ String dataMsg = "Two Dimensions Of Integer Values";
+
+ for (TestWorkloadConf conf : workloadConfs) {
+ runTest(fieldSerdes, valueProviderFactories, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
+ runTest(fieldSerdes, valueProviderFactories, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
+ }
+ }
+
+ //@Test
+ public void fourDimensionsDouble() throws InterruptedException, HyracksException, TreeIndexException {
+ ISerializerDeserializer[] fieldSerdes = { DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+ DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+
+ int numKeys = 8;
+ IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+ numKeys, DoublePointable.FACTORY);
+
+ String dataMsg = "Four Dimensions Of Double Values";
+
+ for (TestWorkloadConf conf : workloadConfs) {
+ runTest(fieldSerdes, valueProviderFactories, numKeys, REGULAR_NUM_THREADS, conf, dataMsg);
+ runTest(fieldSerdes, valueProviderFactories, numKeys, EXCESSIVE_NUM_THREADS, conf, dataMsg);
+ }
+ }
+}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tests/RTreeTestUtils.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tests/RTreeTestUtils.java
index 52228f4..66cd3e9 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tests/RTreeTestUtils.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/tests/RTreeTestUtils.java
@@ -66,39 +66,6 @@
}
@SuppressWarnings("unchecked")
- public void insertIntDeleteTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
- int fieldCount = ctx.getFieldCount();
- int numKeyFields = ctx.getKeyFieldCount();
- int[] fieldValues = new int[ctx.getFieldCount()];
- // Scale range of values according to number of keys.
- // For example, for 2 keys we want the square root of numTuples, for 3
- // keys the cube root of numTuples, etc.
- int maxValue = (int) Math.ceil(Math.pow(numTuples, 1.0 / (double) numKeyFields));
- for (int i = 0; i < numTuples; i++) {
- // Set keys.
- setIntKeyFields(fieldValues, numKeyFields, maxValue, rnd);
- // Set values.
- for (int j = numKeyFields; j < fieldCount; j++) {
- fieldValues[j] = j;
- }
- TupleUtils.createIntegerTuple(ctx.getTupleBuilder(), ctx.getTuple(), fieldValues);
- if (LOGGER.isLoggable(Level.INFO)) {
- if ((i + 1) % (numTuples / Math.min(10, numTuples)) == 0) {
- LOGGER.info("Inserting Tuple " + (i + 1) + "/" + numTuples);
- }
- }
- try {
- ctx.getIndexAccessor().insert(ctx.getTuple());
- ctx.insertCheckTuple(createIntCheckTuple(fieldValues, ctx.getKeyFieldCount()), ctx.getCheckTuples());
- } catch (TreeIndexException e) {
- // We set expected values only after insertion succeeds because
- // we
- // ignore duplicate keys.
- }
- }
- }
-
- @SuppressWarnings("unchecked")
public void insertDoubleTuples(ITreeIndexTestContext ctx, int numTuples, Random rnd) throws Exception {
int fieldCount = ctx.getFieldCount();
int numKeyFields = ctx.getKeyFieldCount();
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java
index 2dea763..bb66e96 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeMultiThreadTest.java
@@ -65,7 +65,7 @@
workloadConfs.add(new TestWorkloadConf(insertOnlyOps, getUniformOpProbs(insertOnlyOps)));
// Inserts mixed with point searches and scans.
- TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH, TestOperation.ORDERED_SCAN, TestOperation.DISKORDER_SCAN };
+ TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.DISKORDER_SCAN };
workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, getUniformOpProbs(insertSearchOnlyOps)));
// Inserts, updates, and deletes.
@@ -73,7 +73,7 @@
workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps, getUniformOpProbs(insertDeleteUpdateOps)));
// All operations mixed.
- TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.ORDERED_SCAN, TestOperation.DISKORDER_SCAN };
+ TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.DISKORDER_SCAN };
workloadConfs.add(new TestWorkloadConf(allOps, getUniformOpProbs(allOps)));
return workloadConfs;
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
index 0940a5a..d5fe582 100644
--- a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/multithread/BTreeTestWorker.java
@@ -96,7 +96,7 @@
consumeCursorTuples(searchCursor);
break;
- case ORDERED_SCAN:
+ case SCAN:
searchCursor.reset();
rangePred.setLowKey(null, true);
rangePred.setHighKey(null, true);
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
index 049d6dc..750340f 100644
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
+++ b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeMultiThreadTest.java
@@ -33,9 +33,9 @@
public class LSMBTreeMultiThreadTest extends OrderedIndexMultiThreadTest {
private LSMBTreeTestHarness harness = new LSMBTreeTestHarness();
-
+
private LSMBTreeTestWorkerFactory workerFactory = new LSMBTreeTestWorkerFactory();
-
+
@Override
protected void setUp() throws HyracksException {
harness.setUp();
@@ -62,35 +62,41 @@
@Override
protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
-
+
// Insert only workload.
TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
workloadConfs.add(new TestWorkloadConf(insertOnlyOps, getUniformOpProbs(insertOnlyOps)));
-
+
// Insert and merge workload.
TestOperation[] insertMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.MERGE };
workloadConfs.add(new TestWorkloadConf(insertMergeOps, getUniformOpProbs(insertMergeOps)));
-
+
// Inserts mixed with point searches and scans.
- TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH, TestOperation.ORDERED_SCAN };
+ TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.POINT_SEARCH,
+ TestOperation.SCAN };
workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, getUniformOpProbs(insertSearchOnlyOps)));
-
+
// Inserts, updates, and deletes.
- TestOperation[] insertDeleteUpdateOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE };
+ TestOperation[] insertDeleteUpdateOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
+ TestOperation.UPDATE };
workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateOps, getUniformOpProbs(insertDeleteUpdateOps)));
-
+
// Inserts, updates, deletes and merges.
- TestOperation[] insertDeleteUpdateMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE, TestOperation.MERGE };
- workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps, getUniformOpProbs(insertDeleteUpdateMergeOps)));
-
+ TestOperation[] insertDeleteUpdateMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
+ TestOperation.UPDATE, TestOperation.MERGE };
+ workloadConfs.add(new TestWorkloadConf(insertDeleteUpdateMergeOps,
+ getUniformOpProbs(insertDeleteUpdateMergeOps)));
+
// All operations except merge.
- TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.ORDERED_SCAN };
+ TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
+ TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.SCAN };
workloadConfs.add(new TestWorkloadConf(allNoMergeOps, getUniformOpProbs(allNoMergeOps)));
-
+
// All operations.
- TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.ORDERED_SCAN, TestOperation.MERGE };
+ TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
+ TestOperation.UPDATE, TestOperation.POINT_SEARCH, TestOperation.SCAN, TestOperation.MERGE };
workloadConfs.add(new TestWorkloadConf(allOps, getUniformOpProbs(allOps)));
-
+
return workloadConfs;
}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java
index 120cee4..8522fb5 100644
--- a/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java
+++ b/hyracks-tests/hyracks-storage-am-lsm-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/btree/multithread/LSMBTreeTestWorker.java
@@ -97,7 +97,7 @@
consumeCursorTuples(searchCursor);
break;
- case ORDERED_SCAN:
+ case SCAN:
searchCursor.reset();
rangePred.setLowKey(null, true);
rangePred.setHighKey(null, true);
diff --git a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMTreeFileManagerTest.java b/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMTreeFileManagerTest.java
index 162cd93..76654db 100644
--- a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMTreeFileManagerTest.java
+++ b/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMTreeFileManagerTest.java
@@ -49,8 +49,8 @@
protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
protected final static String sep = System.getProperty("file.separator");
protected IOManager ioManager;
- protected String baseDir;
-
+ protected String baseDir;
+
@Before
public void setUp() throws HyracksException {
TestStorageManagerComponentHolder.init(DEFAULT_PAGE_SIZE, DEFAULT_NUM_PAGES, DEFAULT_MAX_OPEN_FILES);
@@ -59,7 +59,7 @@
File f = new File(baseDir);
f.mkdirs();
}
-
+
@After
public void tearDown() throws HyracksDataException {
File f = new File(baseDir);
@@ -73,13 +73,13 @@
int numFileNames = 100;
long sleepTime = 5;
for (int i = 0; i < numFileNames; i++) {
- String flushFileName = fileManager.getFlushFileName();
+ String flushFileName = (String) fileManager.getFlushFileName();
if (testFlushFileName) {
fileNames.addFirst(flushFileName);
}
Thread.sleep(sleepTime);
if (!testFlushFileName) {
- String secondFlushFileName = fileManager.getFlushFileName();
+ String secondFlushFileName = (String) fileManager.getFlushFileName();
String mergeFileName = getMergeFileName(fileManager, flushFileName, secondFlushFileName);
fileNames.addFirst(mergeFileName);
Thread.sleep(sleepTime);
@@ -103,26 +103,26 @@
sortOrderTest(true);
sortOrderTest(false);
}
-
+
public void cleanInvalidFilesTest(IOManager ioManager) throws InterruptedException, IOException {
ILSMFileManager fileManager = new LSMTreeFileManager(ioManager, baseDir);
fileManager.createDirs();
-
+
List<FileReference> flushFiles = new ArrayList<FileReference>();
List<FileReference> allFiles = new ArrayList<FileReference>();
-
+
int numFileNames = 100;
long sleepTime = 5;
// Generate a bunch of flush files.
for (int i = 0; i < numFileNames; i++) {
- FileReference flushTempFile = fileManager.createTempFile();
- String flushFileName = fileManager.getFlushFileName();
+ FileReference flushTempFile = fileManager.createTempFile();
+ String flushFileName = (String) fileManager.getFlushFileName();
FileReference flushFile = fileManager.rename(flushTempFile, flushFileName);
flushFiles.add(flushFile);
- Thread.sleep(sleepTime);
+ Thread.sleep(sleepTime);
}
allFiles.addAll(flushFiles);
-
+
// Simulate merging some of the flush files.
// Merge range 0 to 4.
FileReference mergeFile1 = simulateMerge(fileManager, flushFiles.get(0), flushFiles.get(4));
@@ -139,18 +139,18 @@
// Merge range 50 to 79.
FileReference mergeFile5 = simulateMerge(fileManager, flushFiles.get(50), flushFiles.get(79));
allFiles.add(mergeFile5);
-
+
// Simulate merging of merge files.
FileReference mergeFile6 = simulateMerge(fileManager, mergeFile1, mergeFile2);
allFiles.add(mergeFile6);
FileReference mergeFile7 = simulateMerge(fileManager, mergeFile3, mergeFile4);
allFiles.add(mergeFile7);
-
+
// Set delete on exit for all files.
- for (FileReference fileRef : allFiles) {
+ for (FileReference fileRef : allFiles) {
fileRef.getFile().deleteOnExit();
}
-
+
// Populate expected valid flush files.
List<String> expectedValidFiles = new ArrayList<String>();
for (int i = 30; i < 50; i++) {
@@ -159,7 +159,7 @@
for (int i = 80; i < 100; i++) {
expectedValidFiles.add(flushFiles.get(i).getFile().getName());
}
-
+
// Populate expected valid merge files.
expectedValidFiles.add(mergeFile5.getFile().getName());
expectedValidFiles.add(mergeFile6.getFile().getName());
@@ -167,19 +167,19 @@
// Sort expected files.
Collections.sort(expectedValidFiles, fileManager.getFileNameComparator());
-
- List<String> validFiles = fileManager.cleanupAndGetValidFiles();
-
+
+ List<Object> validFiles = fileManager.cleanupAndGetValidFiles();
+
// Check actual files against expected files.
assertEquals(expectedValidFiles.size(), validFiles.size());
for (int i = 0; i < expectedValidFiles.size(); i++) {
- File f = new File(validFiles.get(i));
+ File f = new File((String) validFiles.get(i));
assertEquals(expectedValidFiles.get(i), f.getName());
}
-
+
// Make sure invalid files were removed from all IODevices.
ArrayList<String> remainingFiles = new ArrayList<String>();
- for(IODeviceHandle dev : ioManager.getIODevices()) {
+ for (IODeviceHandle dev : ioManager.getIODevices()) {
File dir = new File(dev.getPath(), baseDir);
FilenameFilter filter = new FilenameFilter() {
public boolean accept(File dir, String name) {
@@ -192,38 +192,38 @@
remainingFiles.add(f.getName());
}
}
-
+
Collections.sort(remainingFiles, fileManager.getFileNameComparator());
// Check actual files in directory against expected files.
assertEquals(expectedValidFiles.size(), remainingFiles.size());
for (int i = 0; i < expectedValidFiles.size(); i++) {
- assertEquals(expectedValidFiles.get(i), remainingFiles.get(i));
+ assertEquals(expectedValidFiles.get(i), remainingFiles.get(i));
}
}
-
+
@Test
public void singleIODeviceTest() throws InterruptedException, IOException {
IOManager singleDeviceIOManager = createIOManager(1);
cleanInvalidFilesTest(singleDeviceIOManager);
cleanDirs(singleDeviceIOManager);
}
-
+
@Test
public void twoIODevicesTest() throws InterruptedException, IOException {
IOManager twoDevicesIOManager = createIOManager(2);
cleanInvalidFilesTest(twoDevicesIOManager);
cleanDirs(twoDevicesIOManager);
}
-
+
@Test
public void fourIODevicesTest() throws InterruptedException, IOException {
IOManager fourDevicesIOManager = createIOManager(4);
cleanInvalidFilesTest(fourDevicesIOManager);
cleanDirs(fourDevicesIOManager);
}
-
+
private void cleanDirs(IOManager ioManager) {
- for(IODeviceHandle dev : ioManager.getIODevices()) {
+ for (IODeviceHandle dev : ioManager.getIODevices()) {
File dir = new File(dev.getPath(), baseDir);
FilenameFilter filter = new FilenameFilter() {
public boolean accept(File dir, String name) {
@@ -237,7 +237,7 @@
}
}
}
-
+
private IOManager createIOManager(int numDevices) throws HyracksException {
List<IODeviceHandle> devices = new ArrayList<IODeviceHandle>();
for (int i = 0; i < numDevices; i++) {
@@ -246,17 +246,19 @@
}
return new IOManager(devices, Executors.newCachedThreadPool());
}
-
- private FileReference simulateMerge(ILSMFileManager fileManager, FileReference a, FileReference b) throws HyracksDataException {
+
+ private FileReference simulateMerge(ILSMFileManager fileManager, FileReference a, FileReference b)
+ throws HyracksDataException {
FileReference tempMergeFile = fileManager.createTempFile();
- String mergeFileName = fileManager.getMergeFileName(a.getFile().getName(), b.getFile().getName());
+ String mergeFileName = (String) fileManager.getMergeFileName(a.getFile().getName(), b.getFile().getName());
FileReference mergeFile = fileManager.rename(tempMergeFile, mergeFileName);
return mergeFile;
}
-
- private String getMergeFileName(ILSMFileManager fileNameManager, String firstFile, String lastFile) throws HyracksDataException {
+
+ private String getMergeFileName(ILSMFileManager fileNameManager, String firstFile, String lastFile)
+ throws HyracksDataException {
File f1 = new File(firstFile);
File f2 = new File(lastFile);
- return fileNameManager.getMergeFileName(f1.getName(), f2.getName());
+ return (String) fileNameManager.getMergeFileName(f1.getName(), f2.getName());
}
}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
new file mode 100644
index 0000000..d486dd0
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeMultiThreadTest.java
@@ -0,0 +1,114 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.lsm.rtree.multithread;
+
+import java.util.ArrayList;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.exceptions.HyracksException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.test.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.test.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.test.TestWorkloadConf;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.util.LSMRTreeTestHarness;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.utils.LSMRTreeUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.tests.AbstractRTreeMultiThreadTest;
+
+public class LSMRTreeMultiThreadTest extends AbstractRTreeMultiThreadTest {
+
+ private LSMRTreeTestHarness harness = new LSMRTreeTestHarness();
+
+ private LSMRTreeTestWorkerFactory workerFactory = new LSMRTreeTestWorkerFactory();
+
+ @Override
+ protected void setUp() throws HyracksException {
+ harness.setUp();
+ }
+
+ @Override
+ protected void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
+ IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories)
+ throws TreeIndexException {
+ return LSMRTreeUtils.createLSMTree(harness.getMemBufferCache(), harness.getMemFreePageManager(),
+ harness.getIOManager(), harness.getOnDiskDir(), harness.getDiskBufferCache(),
+ harness.getDiskFileMapProvider(), typeTraits, rtreeCmpFactories, btreeCmpFactories,
+ valueProviderFactories);
+
+ }
+
+ @Override
+ protected ITreeIndexTestWorkerFactory getWorkerFactory() {
+ return workerFactory;
+ }
+
+ @Override
+ protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
+ ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
+
+ // Insert only workload.
+ TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
+ workloadConfs.add(new TestWorkloadConf(insertOnlyOps, getUniformOpProbs(insertOnlyOps)));
+
+ // Insert and merge workload.
+ TestOperation[] insertMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.MERGE };
+ workloadConfs.add(new TestWorkloadConf(insertMergeOps, getUniformOpProbs(insertMergeOps)));
+
+ // Inserts mixed with scans.
+ TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.SCAN };
+ workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, getUniformOpProbs(insertSearchOnlyOps)));
+
+ // Inserts and deletes.
+ TestOperation[] insertDeleteOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE };
+ workloadConfs.add(new TestWorkloadConf(insertDeleteOps, getUniformOpProbs(insertDeleteOps)));
+
+ // Inserts, deletes and merges.
+ TestOperation[] insertDeleteMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
+ TestOperation.MERGE };
+ workloadConfs.add(new TestWorkloadConf(insertDeleteMergeOps, getUniformOpProbs(insertDeleteMergeOps)));
+
+ // All operations except merge.
+ TestOperation[] allNoMergeOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE,
+ TestOperation.SCAN };
+ workloadConfs.add(new TestWorkloadConf(allNoMergeOps, getUniformOpProbs(allNoMergeOps)));
+
+ // All operations.
+ TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN,
+ TestOperation.MERGE };
+ workloadConfs.add(new TestWorkloadConf(allOps, getUniformOpProbs(allOps)));
+
+ return workloadConfs;
+ }
+
+ @Override
+ protected int getFileId() {
+ return harness.getFileId();
+ }
+
+ @Override
+ protected String getIndexTypeName() {
+ return "LSMRTree";
+ }
+
+}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java
new file mode 100644
index 0000000..37e5c96
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorker.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.lsm.rtree.multithread;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.test.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.test.TestOperationSelector;
+import edu.uci.ics.hyracks.storage.am.common.test.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMMergeInProgressException;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTree;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTree.LSMRTreeAccessor;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+
+public class LSMRTreeTestWorker extends AbstractTreeIndexTestWorker {
+
+ private final LSMRTree lsmRTree;
+ private final int numFields;
+ private final ArrayTupleBuilder rearrangedTb;
+ private final ArrayTupleReference rearrangedTuple = new ArrayTupleReference();
+
+ public LSMRTreeTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, ITreeIndex index, int numBatches) {
+ super(dataGen, opSelector, index, numBatches);
+ lsmRTree = (LSMRTree) index;
+ numFields = lsmRTree.getFieldCount();
+ rearrangedTb = new ArrayTupleBuilder(numFields);
+ }
+
+ @Override
+ public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, TreeIndexException {
+ LSMRTreeAccessor accessor = (LSMRTreeAccessor) indexAccessor;
+ ITreeIndexCursor searchCursor = accessor.createSearchCursor();
+ MultiComparator cmp = accessor.getMultiComparator();
+ SearchPredicate rangePred = new SearchPredicate(tuple, cmp);
+
+ switch (op) {
+ case INSERT:
+ rearrangeTuple(tuple, cmp);
+ accessor.insert(rearrangedTuple);
+ break;
+
+ case DELETE:
+ rearrangeTuple(tuple, cmp);
+ accessor.delete(rearrangedTuple);
+ break;
+
+ case SCAN:
+ searchCursor.reset();
+ rangePred.setSearchKey(null);
+ accessor.search(searchCursor, rangePred);
+ consumeCursorTuples(searchCursor);
+ break;
+
+ case MERGE:
+ try {
+ accessor.merge();
+ } catch (LSMMergeInProgressException e) {
+ // Ignore ongoing merges. Do an insert instead.
+ rearrangeTuple(tuple, cmp);
+ accessor.insert(rearrangedTuple);
+ }
+ break;
+
+ default:
+ throw new HyracksDataException("Op " + op.toString() + " not supported.");
+ }
+ }
+
+ private void rearrangeTuple(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
+ // Create a tuple with rearranged key values to make sure lower points
+ // have smaller coordinates than the corresponding high points.
+ rearrangedTb.reset();
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ if (c > 0) {
+ rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ } else {
+ rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+ }
+ }
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ if (c > 0) {
+ rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+ } else {
+ rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ }
+ }
+ for (int i = cmp.getKeyFieldCount(); i < numFields; i++) {
+ rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+ }
+ rearrangedTuple.reset(rearrangedTb.getFieldEndOffsets(), rearrangedTb.getByteArray());
+ }
+
+ private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
+ try {
+ while (cursor.hasNext()) {
+ cursor.next();
+ }
+ } finally {
+ cursor.close();
+ }
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorkerFactory.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorkerFactory.java
new file mode 100644
index 0000000..50774c1
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/multithread/LSMRTreeTestWorkerFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.lsm.rtree.multithread;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.common.test.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.test.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.test.TestOperationSelector;
+
+public class LSMRTreeTestWorkerFactory implements ITreeIndexTestWorkerFactory {
+ @Override
+ public AbstractTreeIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
+ ITreeIndex index, int numBatches) {
+ return new LSMRTreeTestWorker(dataGen, opSelector, index, numBatches);
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
new file mode 100644
index 0000000..5050a59
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeMultiThreadTest.java
@@ -0,0 +1,97 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.multithread;
+
+import java.util.ArrayList;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.test.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.test.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.common.test.TestWorkloadConf;
+import edu.uci.ics.hyracks.storage.am.rtree.tests.AbstractRTreeMultiThreadTest;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+import edu.uci.ics.hyracks.storage.am.rtree.utils.RTreeTestHarness;
+
+public class RTreeMultiThreadTest extends AbstractRTreeMultiThreadTest {
+
+ private RTreeTestHarness harness = new RTreeTestHarness();
+
+ private RTreeTestWorkerFactory workerFactory = new RTreeTestWorkerFactory();
+
+ @Override
+ protected void setUp() throws HyracksDataException {
+ harness.setUp();
+ }
+
+ @Override
+ protected void tearDown() throws HyracksDataException {
+ harness.tearDown();
+ }
+
+ @Override
+ protected ITreeIndex createTreeIndex(ITypeTraits[] typeTraits, IBinaryComparatorFactory[] rtreeCmpFactories,
+ IBinaryComparatorFactory[] btreeCmpFactories, IPrimitiveValueProviderFactory[] valueProviderFactories)
+ throws TreeIndexException {
+ return RTreeUtils.createRTree(harness.getBufferCache(), harness.getTreeFileId(), typeTraits,
+ valueProviderFactories, rtreeCmpFactories);
+
+ }
+
+ @Override
+ protected ITreeIndexTestWorkerFactory getWorkerFactory() {
+ return workerFactory;
+ }
+
+ @Override
+ protected ArrayList<TestWorkloadConf> getTestWorkloadConf() {
+ ArrayList<TestWorkloadConf> workloadConfs = new ArrayList<TestWorkloadConf>();
+
+ // Insert only workload.
+ TestOperation[] insertOnlyOps = new TestOperation[] { TestOperation.INSERT };
+ workloadConfs.add(new TestWorkloadConf(insertOnlyOps, getUniformOpProbs(insertOnlyOps)));
+
+ // Inserts mixed with scans.
+ TestOperation[] insertSearchOnlyOps = new TestOperation[] { TestOperation.INSERT, TestOperation.SCAN,
+ TestOperation.DISKORDER_SCAN };
+ workloadConfs.add(new TestWorkloadConf(insertSearchOnlyOps, getUniformOpProbs(insertSearchOnlyOps)));
+
+ // Inserts and deletes.
+ TestOperation[] insertDeleteOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE };
+ workloadConfs.add(new TestWorkloadConf(insertDeleteOps, getUniformOpProbs(insertDeleteOps)));
+
+ // All operations mixed.
+ TestOperation[] allOps = new TestOperation[] { TestOperation.INSERT, TestOperation.DELETE, TestOperation.SCAN,
+ TestOperation.DISKORDER_SCAN };
+ workloadConfs.add(new TestWorkloadConf(allOps, getUniformOpProbs(allOps)));
+
+ return workloadConfs;
+ }
+
+ @Override
+ protected int getFileId() {
+ return harness.getTreeFileId();
+ }
+
+ @Override
+ protected String getIndexTypeName() {
+ return "RTree";
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
new file mode 100644
index 0000000..72af5f5
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorker.java
@@ -0,0 +1,124 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.multithread;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.test.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.test.TestOperationSelector;
+import edu.uci.ics.hyracks.storage.am.common.test.TestOperationSelector.TestOperation;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.RTree;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+
+public class RTreeTestWorker extends AbstractTreeIndexTestWorker {
+
+ private final RTree rtree;
+ private final int numFields;
+ private final ArrayTupleReference rearrangedTuple = new ArrayTupleReference();
+ private final ArrayTupleBuilder rearrangedTb;
+
+ public RTreeTestWorker(DataGenThread dataGen, TestOperationSelector opSelector, ITreeIndex index, int numBatches) {
+ super(dataGen, opSelector, index, numBatches);
+ rtree = (RTree) index;
+ numFields = rtree.getFieldCount();
+ rearrangedTb = new ArrayTupleBuilder(numFields);
+ }
+
+ @Override
+ public void performOp(ITupleReference tuple, TestOperation op) throws HyracksDataException, TreeIndexException {
+ RTree.RTreeAccessor accessor = (RTree.RTreeAccessor) indexAccessor;
+ ITreeIndexCursor searchCursor = accessor.createSearchCursor();
+ ITreeIndexCursor diskOrderScanCursor = accessor.createDiskOrderScanCursor();
+ MultiComparator cmp = accessor.getOpContext().cmp;
+ SearchPredicate rangePred = new SearchPredicate(tuple, cmp);
+
+ switch (op) {
+ case INSERT:
+ rearrangeTuple(tuple, cmp);
+ accessor.insert(rearrangedTuple);
+ break;
+
+ case DELETE:
+ rearrangeTuple(tuple, cmp);
+ accessor.delete(rearrangedTuple);
+ break;
+
+ case SCAN:
+ searchCursor.reset();
+ rangePred.setSearchKey(null);
+ accessor.search(searchCursor, rangePred);
+ consumeCursorTuples(searchCursor);
+ break;
+
+ case DISKORDER_SCAN:
+ diskOrderScanCursor.reset();
+ accessor.diskOrderScan(diskOrderScanCursor);
+ consumeCursorTuples(diskOrderScanCursor);
+ break;
+
+ default:
+ throw new HyracksDataException("Op " + op.toString() + " not supported.");
+ }
+ }
+
+ private void rearrangeTuple(ITupleReference tuple, MultiComparator cmp) throws HyracksDataException {
+ // Create a tuple with rearranged key values to make sure lower points
+ // have smaller coordinates than the corresponding high points.
+ rearrangedTb.reset();
+ int maxFieldPos = cmp.getKeyFieldCount() / 2;
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ if (c > 0) {
+ rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ } else {
+ rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+ }
+ }
+ for (int i = 0; i < maxFieldPos; i++) {
+ int j = maxFieldPos + i;
+ int c = cmp.getComparators()[i].compare(tuple.getFieldData(i), tuple.getFieldStart(i),
+ tuple.getFieldLength(i), tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ if (c > 0) {
+ rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+ } else {
+ rearrangedTb.addField(tuple.getFieldData(j), tuple.getFieldStart(j), tuple.getFieldLength(j));
+ }
+ }
+ for (int i = cmp.getKeyFieldCount(); i < numFields; i++) {
+ rearrangedTb.addField(tuple.getFieldData(i), tuple.getFieldStart(i), tuple.getFieldLength(i));
+ }
+ rearrangedTuple.reset(rearrangedTb.getFieldEndOffsets(), rearrangedTb.getByteArray());
+ }
+
+ private void consumeCursorTuples(ITreeIndexCursor cursor) throws HyracksDataException {
+ try {
+ while (cursor.hasNext()) {
+ cursor.next();
+ }
+ } finally {
+ cursor.close();
+ }
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java
new file mode 100644
index 0000000..822bf76
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/multithread/RTreeTestWorkerFactory.java
@@ -0,0 +1,30 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.rtree.multithread;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.common.test.AbstractTreeIndexTestWorker;
+import edu.uci.ics.hyracks.storage.am.common.test.ITreeIndexTestWorkerFactory;
+import edu.uci.ics.hyracks.storage.am.common.test.TestOperationSelector;
+
+public class RTreeTestWorkerFactory implements ITreeIndexTestWorkerFactory {
+ @Override
+ public AbstractTreeIndexTestWorker create(DataGenThread dataGen, TestOperationSelector opSelector,
+ ITreeIndex index, int numBatches) {
+ return new RTreeTestWorker(dataGen, opSelector, index, numBatches);
+ }
+}
diff --git a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
index c82d6e4..d8e9eab 100644
--- a/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
+++ b/hyracks-tests/hyracks-storage-am-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/rtree/utils/RTreeTestHarness.java
@@ -32,7 +32,7 @@
private static final long RANDOM_SEED = 50;
private static final int DEFAULT_PAGE_SIZE = 256;
- private static final int DEFAULT_NUM_PAGES = 10;
+ private static final int DEFAULT_NUM_PAGES = 100;
private static final int DEFAULT_MAX_OPEN_FILES = 10;
private static final int DEFAULT_HYRACKS_FRAME_SIZE = 128;