Added more test cases and did more cleanup.

Moved the LSM file name manager into hyracks-storage-am-lsm-common as
LSMTreeFileNameManager, switched LSMRTree to obtain flush/merge file names
from ILSMFileNameManager, and added tests for the file name manager as well
as LSM-RTree insert, delete, and search tests.
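
A minimal usage sketch of the relocated file name manager, mirroring the API
exercised by the new LSMTreeFileNameManagerTest (the base dir below is just a
placeholder path):

    import java.util.Arrays;
    import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMFileNameManager;
    import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMTreeFileNameManager;

    // A trailing file separator is appended to the base dir if it is missing.
    ILSMFileNameManager fileNameManager = new LSMTreeFileNameManager("/tmp/lsm_rtree/");
    // Timestamp-based names for components produced by a flush or a merge.
    String flushFileName = fileNameManager.getFlushFileName();
    String mergeFileName = fileNameManager.getMergeFileName();
    // The manager's comparator defines the ordering that LSMRTree.open() uses
    // when registering existing on-disk components.
    String[] fileNames = { mergeFileName, flushFileName };
    Arrays.sort(fileNames, fileNameManager.getFileNameComparator());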

git-svn-id: https://hyracks.googlecode.com/svn/branches/hyracks_lsm_tree@1068 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileNameManager.java b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMTreeFileNameManager.java
similarity index 92%
rename from hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileNameManager.java
rename to hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMTreeFileNameManager.java
index 86cbf78..628fbf3 100644
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeFileNameManager.java
+++ b/hyracks-storage-am-lsm-common/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/common/impls/LSMTreeFileNameManager.java
@@ -13,7 +13,7 @@
  * limitations under the License.
  */
 
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
+package edu.uci.ics.hyracks.storage.am.lsm.common.impls;
 
 import java.text.Format;
 import java.text.SimpleDateFormat;
@@ -22,13 +22,13 @@
 
 import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMFileNameManager;
 
-public class LSMRTreeFileNameManager implements ILSMFileNameManager {
+public class LSMTreeFileNameManager implements ILSMFileNameManager {
 
     private final String baseDir;
     private final Format formatter = new SimpleDateFormat("yyyy_MM_dd_HH_mm_ss_SSS");
     private final Comparator<String> cmp = new FileNameComparator();
     
-    public LSMRTreeFileNameManager(String baseDir) {
+    public LSMTreeFileNameManager(String baseDir) {
         if (!baseDir.endsWith(System.getProperty("file.separator"))) {
             baseDir += System.getProperty("file.separator");
         }
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
index 1c65cd2..7b59f1b 100644
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
+++ b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTree.java
@@ -1,6 +1,9 @@
 package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
 
 import java.io.File;
+import java.io.FilenameFilter;
+import java.util.Arrays;
+import java.util.Comparator;
 import java.util.LinkedList;
 import java.util.ListIterator;
 
@@ -22,6 +25,7 @@
 import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMFileNameManager;
 import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMTree;
 import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryFreePageManager;
 import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMTreeBulkLoadContext;
@@ -44,7 +48,7 @@
     private int btreeFileId;
 
     // On-disk components.
-    private final String onDiskDir;
+    private final ILSMFileNameManager fileNameManager;
     private final RTreeFactory diskRTreeFactory;
     private final BTreeFactory diskBTreeFactory;
     private final IBufferCache diskBufferCache;
@@ -70,7 +74,7 @@
     public LSMRTree(IBufferCache memBufferCache, InMemoryFreePageManager memFreePageManager,
             ITreeIndexFrameFactory rtreeInteriorFrameFactory, ITreeIndexFrameFactory rtreeLeafFrameFactory,
             ITreeIndexFrameFactory btreeInteriorFrameFactory, ITreeIndexFrameFactory btreeLeafFrameFactory,
-            String onDiskDir, RTreeFactory diskRTreeFactory, BTreeFactory diskBTreeFactory,
+            ILSMFileNameManager fileNameManager, RTreeFactory diskRTreeFactory, BTreeFactory diskBTreeFactory,
             IFileMapProvider diskFileMapProvider, int fieldCount, MultiComparator rtreeCmp, MultiComparator btreeCmp) {
 
         memRTree = new RTree(memBufferCache, fieldCount, rtreeCmp, memFreePageManager, rtreeInteriorFrameFactory,
@@ -93,10 +97,7 @@
         this.onDiskBTreeCount = 0;
         this.threadRefCount = 0;
         this.flushFlag = false;
-        if (!onDiskDir.endsWith(System.getProperty("file.separator"))) {
-            onDiskDir += System.getProperty("file.separator");
-        }
-        this.onDiskDir = onDiskDir;
+        this.fileNameManager = fileNameManager;
         this.rtreeFileId = 0;
         this.btreeFileId = 1;
     }
@@ -108,7 +109,14 @@
 
     @Override
     public IIndexBulkLoadContext beginBulkLoad(float fillFactor) throws TreeIndexException, HyracksDataException {
-        RTree diskRTree = (RTree) createDiskTree(TreeType.rtree, diskRTreeFactory);
+        // Note that by using a flush target file name, we state that the new
+        // bulk-loaded tree is "newer" than any other merged tree.
+
+        String fileName = fileNameManager.getFlushFileName();
+        RTree diskRTree = (RTree) createDiskTree(fileName + "-rtree", diskRTreeFactory, true);
+        // For each RTree we require a buddy BTree; thus, we create an
+        // empty BTree here. This can be optimized later.
+        createDiskTree(fileName + "-btree", diskBTreeFactory, true);
         LSMTreeBulkLoadContext bulkLoadCtx = new LSMTreeBulkLoadContext(diskRTree);
         bulkLoadCtx.beginBulkLoad(fillFactor);
         return bulkLoadCtx;
@@ -171,21 +179,81 @@
         memBTree.create(btreeFileId);
     }
 
+    /**
+     * Opens the LSMRTree, assuming a consistent state of the disk-resident
+     * components. In particular, it registers all files in the base dir of the
+     * fileNameManager as on-disk RTrees and BTrees.
+     * 
+     * Example pathological scenario to explain "consistent state assumption":
+     * Suppose a merge finished, but the system crashed before the original
+     * files were deleted. We are left in a state where we have the original
+     * RTrees and BTrees in addition to the merged ones. We assume that, prior
+     * to calling this method, a separate recovery process has ensured the
+     * consistency of the disk-resident components.
+     * 
+     * @param indexFileId
+     *            Dummy file id.
+     * @throws HyracksDataException
+     */
     @Override
-    public void open(int indexFileId) {
+    public void open(int indexFileId) throws HyracksDataException {
         memRTree.open(rtreeFileId);
         memBTree.open(btreeFileId);
+        File dir = new File(fileNameManager.getBaseDir());
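+        // Component files are created with "-rtree" / "-btree" suffixes (see the
+        // createDiskTree() call sites), so the suffix identifies the component type.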
+        FilenameFilter rtreeFilter = new FilenameFilter() {
+            public boolean accept(File dir, String name) {
+                return !name.startsWith(".") && !name.endsWith("btree");
+            }
+        };
+        String[] rtreeFiles = dir.list(rtreeFilter);
+
+        FilenameFilter btreeFilter = new FilenameFilter() {
+            public boolean accept(File dir, String name) {
+                return !name.startsWith(".") && !name.endsWith("rtree");
+            }
+        };
+        String[] btreeFiles = dir.list(btreeFilter);
+
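+        // File.list() returns null if the base dir does not exist yet (e.g., no
+        // flush or merge has happened), in which case there is nothing to register.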
+        if (rtreeFiles == null || btreeFiles == null) {
+            return;
+        }
+        
+        Comparator<String> fileNameCmp = fileNameManager.getFileNameComparator();
+        Arrays.sort(rtreeFiles, fileNameCmp);
+        for (String fileName : rtreeFiles) {
+            RTree rtree = (RTree) createDiskTree(fileName, diskRTreeFactory, false);
+            onDiskRTrees.add(rtree);
+        }
+
+        Arrays.sort(btreeFiles, fileNameCmp);
+        for (String fileName : btreeFiles) {
+            BTree btree = (BTree) createDiskTree(fileName, diskBTreeFactory, false);
+            onDiskBTrees.add(btree);
+        }
     }
 
     @Override
-    public void close() {
+    public void close() throws HyracksDataException {
+        for (RTree rtree : onDiskRTrees) {
+            diskBufferCache.closeFile(rtree.getFileId());
+            rtree.close();
+        }
+        for (BTree btree : onDiskBTrees) {
+            diskBufferCache.closeFile(btree.getFileId());
+            btree.close();
+        }
+        onDiskRTrees.clear();
+        onDiskBTrees.clear();
+        onDiskRTreeCount = 0;
+        onDiskBTreeCount = 0;
         memRTree.close();
         memBTree.close();
     }
 
-    private ITreeIndex createDiskTree(TreeType treeType, TreeFactory diskTreeFactory) throws HyracksDataException {
+    private ITreeIndex createDiskTree(String fileName, TreeFactory diskTreeFactory, boolean createTree)
+            throws HyracksDataException {
         // Register the new tree file.
-        FileReference file = new FileReference(new File(getNextFileName(treeType)));
+        FileReference file = new FileReference(new File(fileName));
         // TODO: Delete the file during cleanup.
         diskBufferCache.createFile(file);
         int diskTreeFileId = diskFileMapProvider.lookupFileId(file);
@@ -193,24 +261,14 @@
         diskBufferCache.openFile(diskTreeFileId);
         // Create new tree instance.
         ITreeIndex diskTree = diskTreeFactory.createIndexInstance(diskTreeFileId);
-        diskTree.create(diskTreeFileId);
+        if (createTree) {
+            diskTree.create(diskTreeFileId);
+        }
         // TODO: Close the tree during cleanup.
         diskTree.open(diskTreeFileId);
         return diskTree;
     }
 
-    private enum TreeType {
-        rtree, btree
-    }
-
-    private String getNextFileName(TreeType treeType) {
-        if (treeType == TreeType.rtree) {
-            return onDiskDir + treeType + "_" + onDiskRTreeCount++;
-        } else {
-            return onDiskDir + treeType + "_" + onDiskBTreeCount++;
-        }
-    }
-
     @Override
     public void merge() throws Exception {
 
@@ -228,8 +286,9 @@
             btreeCursors[i] = new BTreeRangeSearchCursor((IBTreeLeafFrame) btreeLeafFrameFactory.createFrame(), false);
         }
 
-        RTree mergedRTree = (RTree) createDiskTree(TreeType.rtree, diskRTreeFactory);
-        BTree mergedBTree = (BTree) createDiskTree(TreeType.btree, diskBTreeFactory);
+        String fileName = fileNameManager.getMergeFileName();
+        RTree mergedRTree = (RTree) createDiskTree(fileName + "-rtree", diskRTreeFactory, true);
+        BTree mergedBTree = (BTree) createDiskTree(fileName + "-btree", diskBTreeFactory, true);
 
         // BulkLoad the tuples from the trees into the new merged trees.
         IIndexBulkLoadContext rtreeBulkLoadCtx = mergedRTree.beginBulkLoad(1.0f);
@@ -293,7 +352,8 @@
         ITreeIndexAccessor memRTreeAccessor = memRTree.createAccessor();
         memRTreeAccessor.search(rtreeScanCursor, rtreeNullPredicate);
 
-        RTree diskRTree = (RTree) createDiskTree(TreeType.rtree, diskRTreeFactory);
+        String fileName = fileNameManager.getFlushFileName();
+        RTree diskRTree = (RTree) createDiskTree(fileName + "-rtree", diskRTreeFactory, true);
 
         // BulkLoad the tuples from the in-memory tree into the new disk RTree.
         IIndexBulkLoadContext rtreeBulkLoadCtx = diskRTree.beginBulkLoad(1.0f);
@@ -316,7 +376,7 @@
         ITreeIndexAccessor memBTreeAccessor = memBTree.createAccessor();
         memBTreeAccessor.search(btreeScanCursor, btreeNullPredicate);
 
-        BTree diskBTree = (BTree) createDiskTree(TreeType.btree, diskBTreeFactory);
+        BTree diskBTree = (BTree) createDiskTree(fileName + "-btree", diskBTreeFactory, true);
 
         // BulkLoad the tuples from the in-memory tree into the new disk BTree.
         IIndexBulkLoadContext btreeBulkLoadCtx = diskBTree.beginBulkLoad(1.0f);
@@ -339,7 +399,7 @@
     }
 
     public void resetInMemoryTrees() throws HyracksDataException {
-        ((InMemoryFreePageManager) memFreePageManager).reset();
+        memFreePageManager.reset();
         memRTree.create(rtreeFileId);
         memBTree.create(btreeFileId);
     }
@@ -520,13 +580,12 @@
 
         @Override
         public ITreeIndexCursor createDiskOrderScanCursor() {
-            // TODO: Not implemented yet.
-            return null;
+            throw new UnsupportedOperationException("DiskOrderScan not supported by LSM-RTree.");
         }
-
+        
         @Override
         public void diskOrderScan(ITreeIndexCursor cursor) throws HyracksDataException {
-            throw new UnsupportedOperationException("DiskOrderScan not supported by LSMRTree");
+            throw new UnsupportedOperationException("DiskOrderScan not supported by LSM-RTree.");
         }
     }
 
diff --git a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeInMemoryBufferCacheFactory.java b/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeInMemoryBufferCacheFactory.java
deleted file mode 100644
index 0ebb118..0000000
--- a/hyracks-storage-am-lsm-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/impls/LSMRTreeInMemoryBufferCacheFactory.java
+++ /dev/null
@@ -1,26 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.rtree.impls;
-
-import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
-
-public class LSMRTreeInMemoryBufferCacheFactory {
-	
-	private IBufferCache bufferCache;
-    private final int pageSize;
-    private final int numPages;
-	
-    public LSMRTreeInMemoryBufferCacheFactory(int pageSize, int numPages) {
-    	this.pageSize = pageSize;
-    	this.numPages = numPages;
-        bufferCache = null;
-    }
-    
-    public synchronized IBufferCache createInMemoryBufferCache() {
-        if (bufferCache == null) {
-            ICacheMemoryAllocator allocator = new HeapBufferAllocator();
-            bufferCache = new LSMRTreeInMemoryBufferCache(allocator, pageSize, numPages);
-        }
-        return bufferCache;
-    }
-}
diff --git a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
index 71bbda9..86cad34e 100644
--- a/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
+++ b/hyracks-storage-am-rtree/src/main/java/edu/uci/ics/hyracks/storage/am/rtree/impls/RTree.java
@@ -237,6 +237,10 @@
         fileId = -1;
     }
 
+    public int getFileId() {
+        return fileId;
+    }
+    
     private RTreeOpContext createOpContext() {
         return new RTreeOpContext((IRTreeLeafFrame) leafFrameFactory.createFrame(),
                 (IRTreeInteriorFrame) interiorFrameFactory.createFrame(), freePageManager.getMetaDataFrameFactory()
diff --git a/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMTreeFileNameManagerTest.java b/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMTreeFileNameManagerTest.java
new file mode 100644
index 0000000..d43f34f
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-lsm-common-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/common/LSMTreeFileNameManagerTest.java
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.lsm.common;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
+
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Date;
+import java.util.HashSet;
+import java.util.LinkedList;
+import java.util.List;
+
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMFileNameManager;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMTreeFileNameManager;
+
+public class LSMTreeFileNameManagerTest {
+    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+    protected final static String sep = System.getProperty("file.separator");
+    protected String baseDir;
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        baseDir = tmpDir + sep + "lsm_tree" + simpleDateFormat.format(new Date()) + sep;
+    }
+
+    public void flushFileNamesTest(boolean testFlushFileName) throws InterruptedException {
+        ILSMFileNameManager fileNameManager = new LSMTreeFileNameManager(baseDir);
+        LinkedList<String> fileNames = new LinkedList<String>();
+
+        int numFileNames = 100;
+        long sleepTime = 5;
+        for (int i = 0; i < numFileNames; i++) {
+            String fileName = null;
+            if (testFlushFileName) {
+                fileName = fileNameManager.getFlushFileName();
+            } else {
+                fileName = fileNameManager.getMergeFileName();
+            }
+            fileNames.addFirst(fileName);
+            Thread.sleep(sleepTime);
+        }
+
+        List<String> sortedFileNames = new ArrayList<String>();
+        sortedFileNames.addAll(fileNames);        
+
+        // Make sure the comparator sorts in the correct order (i.e., the
+        // reverse insertion order in this case).
+        Comparator<String> cmp = fileNameManager.getFileNameComparator();
+        Collections.sort(sortedFileNames, cmp);
+        for (int i = 0; i < numFileNames; i++) {
+            assertEquals(fileNames.get(i), sortedFileNames.get(i));
+        }
+    }
+
+    @Test
+    public void individualFileNamesTest() throws InterruptedException {
+        flushFileNamesTest(true);
+        flushFileNamesTest(false);
+    }
+
+    @Test
+    public void mixedFileNamesTest() throws InterruptedException {
+        ILSMFileNameManager fileNameManager = new LSMTreeFileNameManager(baseDir);
+        List<String> fileNames = new ArrayList<String>();
+        HashSet<String> flushFileNames = new HashSet<String>();
+        HashSet<String> mergeFileNames = new HashSet<String>();
+        
+        int numFileNames = 100;
+        long sleepTime = 5;
+        for (int i = 0; i < numFileNames; i++) {
+            String fileName = null;
+            if (i % 2 == 0) {
+                fileName = fileNameManager.getFlushFileName();
+                flushFileNames.add(fileName);
+            } else {
+                fileName = fileNameManager.getMergeFileName();
+                mergeFileNames.add(fileName);
+            }
+            fileNames.add(fileName);
+            Thread.sleep(sleepTime);
+        }
+
+        // Make sure the comparator sorts in the correct order.
+        // We only need to check whether the flushed files and merged files are
+        // clustered.
+        // We verified the secondary sort order (based on timestamp) in the
+        // individualFileNamesTest().
+        Comparator<String> cmp = fileNameManager.getFileNameComparator();
+        Collections.sort(fileNames, cmp);
+        // We expect flush file names at the front.
+        int i = 0;
+        while (flushFileNames.contains(fileNames.get(i))) {
+            i++;
+        }
+        // We expect only merge file names from here on.
+        while (i < numFileNames) {
+            if (!mergeFileNames.contains(fileNames.get(i))) {
+                fail("Expected a merge file name at position: " + i);
+            }
+            i++;
+        }
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeTest.java
new file mode 100644
index 0000000..0c61186
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/AbstractLSMRTreeTest.java
@@ -0,0 +1,166 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.lsm.rtree;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Random;
+import java.util.logging.Logger;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryBufferCache;
+import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryFreePageManager;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeInMemoryBufferCache;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeInMemoryFreePageManager;
+import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public abstract class AbstractLSMRTreeTest {
+    protected static final Logger LOGGER = Logger.getLogger(AbstractLSMRTreeTest.class.getName());
+
+    private static final long RANDOM_SEED = 50;
+    private static final int DEFAULT_DISK_PAGE_SIZE = 256;
+    private static final int DEFAULT_DISK_NUM_PAGES = 100;
+    private static final int DEFAULT_DISK_MAX_OPEN_FILES = 100;
+    private static final int DEFAULT_MEM_PAGE_SIZE = 256;
+    private static final int DEFAULT_MEM_NUM_PAGES = 100;
+    private static final int DEFAULT_HYRACKS_FRAME_SIZE = 128;
+    private static final int DUMMY_FILE_ID = -1;
+
+    protected final int diskPageSize;
+    protected final int diskNumPages;
+    protected final int diskMaxOpenFiles;
+    protected final int memPageSize;
+    protected final int memNumPages;
+    protected final int hyracksFrameSize;
+
+    protected IBufferCache diskBufferCache;
+    protected IFileMapProvider diskFileMapProvider;
+    protected InMemoryBufferCache memBufferCache;
+    protected InMemoryFreePageManager memFreePageManager;
+    protected IHyracksTaskContext ctx;
+
+    protected final Random rnd = new Random();
+    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+    protected final static String sep = System.getProperty("file.separator");
+    protected String onDiskDir;
+
+    public AbstractLSMRTreeTest() {
+        this.diskPageSize = DEFAULT_DISK_PAGE_SIZE;
+        this.diskNumPages = DEFAULT_DISK_NUM_PAGES;
+        this.diskMaxOpenFiles = DEFAULT_DISK_MAX_OPEN_FILES;
+        this.memPageSize = DEFAULT_MEM_PAGE_SIZE;
+        this.memNumPages = DEFAULT_MEM_NUM_PAGES;
+        this.hyracksFrameSize = DEFAULT_HYRACKS_FRAME_SIZE;
+    }
+
+    public AbstractLSMRTreeTest(int diskPageSize, int diskNumPages, int diskMaxOpenFiles, int memPageSize,
+            int memNumPages, int hyracksFrameSize) {
+        this.diskPageSize = diskPageSize;
+        this.diskNumPages = diskNumPages;
+        this.diskMaxOpenFiles = diskMaxOpenFiles;
+        this.memPageSize = memPageSize;
+        this.memNumPages = memNumPages;
+        this.hyracksFrameSize = hyracksFrameSize;
+    }
+
+    @Before
+    public void setUp() throws HyracksDataException {
+        onDiskDir = tmpDir + sep + "lsm_rtree_" + simpleDateFormat.format(new Date()) + sep;
+        ctx = TestUtils.create(getHyracksFrameSize());
+        TestStorageManagerComponentHolder.init(diskPageSize, diskNumPages, diskMaxOpenFiles);
+        diskBufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        diskFileMapProvider = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        memBufferCache = new LSMRTreeInMemoryBufferCache(new HeapBufferAllocator(), getMemPageSize(), getMemNumPages());
+        memFreePageManager = new LSMRTreeInMemoryFreePageManager(memNumPages, new LIFOMetaDataFrameFactory());
+        rnd.setSeed(RANDOM_SEED);
+    }
+
+    @After
+    public void tearDown() throws HyracksDataException {
+        diskBufferCache.close();
+        File f = new File(onDiskDir);
+        // TODO: For some reason the dir fails to be deleted. Ask Vinayak about
+        // this.
+        f.deleteOnExit();
+    }
+
+    public int getDiskPageSize() {
+        return diskPageSize;
+    }
+
+    public int getDiskNumPages() {
+        return diskNumPages;
+    }
+
+    public int getDiskMaxOpenFiles() {
+        return diskMaxOpenFiles;
+    }
+
+    public int getMemPageSize() {
+        return memPageSize;
+    }
+
+    public int getMemNumPages() {
+        return memNumPages;
+    }
+
+    public int getHyracksFrameSize() {
+        return hyracksFrameSize;
+    }
+
+    public int getFileId() {
+        return DUMMY_FILE_ID;
+    }
+
+    public IBufferCache getDiskBufferCache() {
+        return diskBufferCache;
+    }
+
+    public IFileMapProvider getDiskFileMapProvider() {
+        return diskFileMapProvider;
+    }
+
+    public InMemoryBufferCache getMemBufferCache() {
+        return memBufferCache;
+    }
+
+    public InMemoryFreePageManager getMemFreePageManager() {
+        return memFreePageManager;
+    }
+
+    public IHyracksTaskContext getHyracksTaskContext() {
+        return ctx;
+    }
+
+    public String getOnDiskDir() {
+        return onDiskDir;
+    }
+
+    public Random getRandom() {
+        return rnd;
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/AbstractLSMTreeTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/AbstractLSMTreeTest.java
deleted file mode 100644
index ffcb02d..0000000
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/AbstractLSMTreeTest.java
+++ /dev/null
@@ -1,76 +0,0 @@
-package edu.uci.ics.hyracks.storage.am.lsm.rtree;
-
-import java.io.File;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.Random;
-import java.util.logging.Logger;
-
-import org.junit.After;
-import org.junit.Before;
-
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
-import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
-import edu.uci.ics.hyracks.api.io.FileReference;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
-
-public abstract class AbstractLSMTreeTest {
-    protected static final Logger LOGGER = Logger.getLogger(AbstractLSMTreeTest.class.getName());
-    public static final long RANDOM_SEED = 50;
-    
-    private static final int PAGE_SIZE = 256;
-    private static final int NUM_PAGES = 10;
-    private static final int MAX_OPEN_FILES = 10;
-    private static final int HYRACKS_FRAME_SIZE = 128;
-        
-    protected IHyracksTaskContext ctx; 
-    protected IBufferCache bufferCache;
-    protected int btreeFileId;
-    
-    protected final Random rnd = new Random();
-    protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
-    protected final static String tmpDir = System.getProperty("java.io.tmpdir");
-    protected final static String sep = System.getProperty("file.separator");
-    protected String fileName;    
-    
-    @Before
-    public void setUp() throws HyracksDataException {
-        fileName = tmpDir + sep + simpleDateFormat.format(new Date());
-        ctx = TestUtils.create(getHyracksFrameSize());
-        TestStorageManagerComponentHolder.init(getPageSize(), getNumPages(), getMaxOpenFiles());
-        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName)); 
-        bufferCache.createFile(file);
-        btreeFileId = fmp.lookupFileId(file);
-        bufferCache.openFile(btreeFileId);
-        rnd.setSeed(RANDOM_SEED);
-    }
-    
-    @After
-    public void tearDown() throws HyracksDataException {
-        bufferCache.closeFile(btreeFileId);
-        bufferCache.close();
-        File f = new File(fileName);
-        f.deleteOnExit();
-    }
-    
-    public int getPageSize() {
-        return PAGE_SIZE;
-    }
-    
-    public int getNumPages() {
-        return NUM_PAGES;
-    }
-    
-    public int getHyracksFrameSize() {
-        return HYRACKS_FRAME_SIZE;
-    }
-    
-    public int getMaxOpenFiles() {
-        return MAX_OPEN_FILES;
-    }
-}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeSearchTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeSearchTest.java
new file mode 100644
index 0000000..6fe5ad3
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeSearchTest.java
@@ -0,0 +1,248 @@
+package edu.uci.ics.hyracks.storage.am.lsm.rtree;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Random;
+import java.util.logging.Level;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManagerFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMFileNameManager;
+import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryFreePageManager;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMTreeFileNameManager;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.BTreeFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTree;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeInMemoryFreePageManager;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.RTreeFactory;
+import edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples.LSMTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
+import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
+
+public class LSMRTreeSearchTest extends AbstractLSMRTreeTest {
+
+    // create an LSM-RTree of two dimensions
+    // fill the tree with random values using insertions
+    // and then perform range search
+    @Test
+    public void test01() throws Exception {
+
+        // declare r-tree keys
+        int rtreeKeyFieldCount = 4;
+        IBinaryComparator[] rtreeCmps = new IBinaryComparator[rtreeKeyFieldCount];
+        rtreeCmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
+        rtreeCmps[1] = rtreeCmps[0];
+        rtreeCmps[2] = rtreeCmps[0];
+        rtreeCmps[3] = rtreeCmps[0];
+
+        // declare b-tree keys
+        int btreeKeyFieldCount = 5;
+        IBinaryComparator[] btreeCmps = new IBinaryComparator[btreeKeyFieldCount];
+        btreeCmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
+        btreeCmps[1] = btreeCmps[0];
+        btreeCmps[2] = btreeCmps[0];
+        btreeCmps[3] = btreeCmps[0];
+        btreeCmps[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+        // declare tuple fields
+        int fieldCount = 5;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = DoublePointable.TYPE_TRAITS;
+        typeTraits[1] = DoublePointable.TYPE_TRAITS;
+        typeTraits[2] = DoublePointable.TYPE_TRAITS;
+        typeTraits[3] = DoublePointable.TYPE_TRAITS;
+        typeTraits[4] = IntegerPointable.TYPE_TRAITS;
+
+        MultiComparator rtreeCmp = new MultiComparator(rtreeCmps);
+        MultiComparator btreeCmp = new MultiComparator(btreeCmps);
+
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                rtreeCmps.length, DoublePointable.FACTORY);
+
+        LSMTypeAwareTupleWriterFactory rtreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+        LSMTypeAwareTupleWriterFactory btreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+        ITreeIndexFrameFactory rtreeInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(rtreeTupleWriterFactory,
+                valueProviderFactories);
+        ITreeIndexFrameFactory rtreeLeafFrameFactory = new RTreeNSMLeafFrameFactory(rtreeTupleWriterFactory,
+                valueProviderFactories);
+
+        ITreeIndexFrameFactory btreeInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(btreeTupleWriterFactory);
+        ITreeIndexFrameFactory btreeLeafFrameFactory = new BTreeNSMLeafFrameFactory(btreeTupleWriterFactory);
+
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        InMemoryFreePageManager memFreePageManager = new LSMRTreeInMemoryFreePageManager(1000, metaFrameFactory);
+
+        LinkedListFreePageManagerFactory freePageManagerFactory = new LinkedListFreePageManagerFactory(diskBufferCache,
+                metaFrameFactory);
+
+        RTreeFactory diskRTreeFactory = new RTreeFactory(diskBufferCache, freePageManagerFactory, rtreeCmp, fieldCount,
+                rtreeInteriorFrameFactory, rtreeLeafFrameFactory);
+        BTreeFactory diskBTreeFactory = new BTreeFactory(diskBufferCache, freePageManagerFactory, btreeCmp, fieldCount,
+                btreeInteriorFrameFactory, btreeLeafFrameFactory);
+
+        ILSMFileNameManager fileNameManager = new LSMTreeFileNameManager(onDiskDir);
+        LSMRTree lsmRTree = new LSMRTree(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory,
+                rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
+                diskRTreeFactory, diskBTreeFactory, diskFileMapProvider, fieldCount, rtreeCmp, btreeCmp);
+
+        lsmRTree.create(getFileId());
+        lsmRTree.open(getFileId());
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        @SuppressWarnings("rawtypes")
+        ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        ITreeIndexAccessor lsmTreeAccessor = lsmRTree.createAccessor();
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        for (int i = 0; i < 5000; i++) {
+
+            double p1x = rnd.nextDouble();
+            double p1y = rnd.nextDouble();
+            double p2x = rnd.nextDouble();
+            double p2y = rnd.nextDouble();
+
+            int pk = rnd.nextInt();
+
+            tb.reset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(pk, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+                }
+            }
+
+            try {
+                lsmTreeAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
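+                // Ignore TreeIndexException (e.g., a duplicate entry); this test
+                // does not verify the outcome of individual inserts.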
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        for (int i = 0; i < 50; i++) {
+            double p1x = rnd.nextDouble();
+            double p1y = rnd.nextDouble();
+            double p2x = rnd.nextDouble();
+            double p2y = rnd.nextDouble();
+
+            int pk = rnd.nextInt();
+
+            tb.reset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(pk, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(i + " Searching for: " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                        + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+            }
+
+            ITreeIndexCursor searchCursor = new LSMRTreeSearchCursor();
+            SearchPredicate searchPredicate = new SearchPredicate(tuple, rtreeCmp);
+
+            lsmTreeAccessor.search(searchCursor, searchPredicate);
+
+            ArrayList<Integer> results = new ArrayList<Integer>();
+            try {
+                while (searchCursor.hasNext()) {
+                    searchCursor.next();
+                    ITupleReference frameTuple = searchCursor.getTuple();
+                    ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(4),
+                            frameTuple.getFieldStart(4), frameTuple.getFieldLength(4));
+                    DataInput dataIn = new DataInputStream(inStream);
+                    Integer res = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
+                    results.add(res);
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+            } finally {
+                searchCursor.close();
+            }
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("There are " + results.size() + " objects that satisfy the query");
+            }
+        }
+
+        lsmRTree.close();
+        memBufferCache.close();
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeTest.java b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeTest.java
index 7755f21..867efcc 100644
--- a/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeTest.java
+++ b/hyracks-tests/hyracks-storage-am-lsm-rtree-test/src/test/java/edu/uci/ics/hyracks/storage/am/lsm/rtree/LSMRTreeTest.java
@@ -1,24 +1,32 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
 package edu.uci.ics.hyracks.storage.am.lsm.rtree;
 
-import java.io.ByteArrayInputStream;
-import java.io.DataInput;
-import java.io.DataInputStream;
 import java.io.DataOutput;
-import java.io.File;
 import java.nio.ByteBuffer;
-import java.util.ArrayList;
 import java.util.Random;
 import java.util.logging.Level;
 
 import org.junit.Test;
 
 import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
-import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
 import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
 import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
 import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
 import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
-import edu.uci.ics.hyracks.api.io.FileReference;
 import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
 import edu.uci.ics.hyracks.data.std.primitive.DoublePointable;
 import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
@@ -26,61 +34,36 @@
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
 import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
-import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.DoubleSerializerDeserializer;
 import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
-import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
 import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
 import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManagerFactory;
 import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.lsm.common.api.ILSMFileNameManager;
 import edu.uci.ics.hyracks.storage.am.lsm.common.freepage.InMemoryFreePageManager;
+import edu.uci.ics.hyracks.storage.am.lsm.common.impls.LSMTreeFileNameManager;
 import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.BTreeFactory;
 import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTree;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeInMemoryBufferCacheFactory;
 import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeInMemoryFreePageManager;
-import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.LSMRTreeSearchCursor;
 import edu.uci.ics.hyracks.storage.am.lsm.rtree.impls.RTreeFactory;
 import edu.uci.ics.hyracks.storage.am.lsm.rtree.tuples.LSMTypeAwareTupleWriterFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMInteriorFrameFactory;
 import edu.uci.ics.hyracks.storage.am.rtree.frames.RTreeNSMLeafFrameFactory;
-import edu.uci.ics.hyracks.storage.am.rtree.impls.SearchPredicate;
 import edu.uci.ics.hyracks.storage.am.rtree.util.RTreeUtils;
-import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
-import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
-import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
-import edu.uci.ics.hyracks.test.support.TestUtils;
 
-public class LSMRTreeTest extends AbstractLSMTreeTest {
+public class LSMRTreeTest extends AbstractLSMRTreeTest {
 
-    private static final int PAGE_SIZE = 256;
-    private static final int NUM_PAGES = 1000;
-    private static final int MAX_OPEN_FILES = 100;
-    private static final int HYRACKS_FRAME_SIZE = 128;
-    private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
-
-    // INSERT-DELETE TEST
+    // create an LSM-RTree of two dimensions
+    // fill the tree with random values using insertions
     @Test
-    public void Test1() throws Exception {
-        // in disk
-        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
-        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
-        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
-        FileReference file = new FileReference(new File(fileName));
-        bufferCache.createFile(file);
-        int fileId = fmp.lookupFileId(file);
-        bufferCache.openFile(fileId);
-
-        // in memory
-        LSMRTreeInMemoryBufferCacheFactory inMemBufferCacheFactory = new LSMRTreeInMemoryBufferCacheFactory(PAGE_SIZE,
-                NUM_PAGES);
-        IBufferCache memBufferCache = inMemBufferCacheFactory.createInMemoryBufferCache();
+    public void test01() throws Exception {
 
         // declare r-tree keys
         int rtreeKeyFieldCount = 4;
@@ -91,22 +74,26 @@
         rtreeCmps[3] = rtreeCmps[0];
 
         // declare b-tree keys
-        int btreeKeyFieldCount = 5;
+        int btreeKeyFieldCount = 7;
         IBinaryComparator[] btreeCmps = new IBinaryComparator[btreeKeyFieldCount];
         btreeCmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
         btreeCmps[1] = btreeCmps[0];
         btreeCmps[2] = btreeCmps[0];
         btreeCmps[3] = btreeCmps[0];
-        btreeCmps[4] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        btreeCmps[4] = btreeCmps[0];
+        btreeCmps[5] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        btreeCmps[6] = btreeCmps[0];
 
         // declare tuple fields
-        int fieldCount = 5;
+        int fieldCount = 7;
         ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
         typeTraits[0] = DoublePointable.TYPE_TRAITS;
         typeTraits[1] = DoublePointable.TYPE_TRAITS;
         typeTraits[2] = DoublePointable.TYPE_TRAITS;
         typeTraits[3] = DoublePointable.TYPE_TRAITS;
-        typeTraits[4] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[4] = DoublePointable.TYPE_TRAITS;
+        typeTraits[5] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[6] = DoublePointable.TYPE_TRAITS;
 
         MultiComparator rtreeCmp = new MultiComparator(rtreeCmps);
         MultiComparator btreeCmp = new MultiComparator(btreeCmps);
@@ -130,20 +117,21 @@
 
         InMemoryFreePageManager memFreePageManager = new LSMRTreeInMemoryFreePageManager(1000, metaFrameFactory);
 
-        LinkedListFreePageManagerFactory freePageManagerFactory = new LinkedListFreePageManagerFactory(bufferCache,
+        LinkedListFreePageManagerFactory freePageManagerFactory = new LinkedListFreePageManagerFactory(diskBufferCache,
                 metaFrameFactory);
 
-        RTreeFactory diskRTreeFactory = new RTreeFactory(bufferCache, freePageManagerFactory, rtreeCmp, fieldCount,
+        RTreeFactory diskRTreeFactory = new RTreeFactory(diskBufferCache, freePageManagerFactory, rtreeCmp, fieldCount,
                 rtreeInteriorFrameFactory, rtreeLeafFrameFactory);
-        BTreeFactory diskBTreeFactory = new BTreeFactory(bufferCache, freePageManagerFactory, btreeCmp, fieldCount,
+        BTreeFactory diskBTreeFactory = new BTreeFactory(diskBufferCache, freePageManagerFactory, btreeCmp, fieldCount,
                 btreeInteriorFrameFactory, btreeLeafFrameFactory);
 
+        ILSMFileNameManager fileNameManager = new LSMTreeFileNameManager(onDiskDir);
         LSMRTree lsmRTree = new LSMRTree(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory,
-                rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, tmpDir, diskRTreeFactory,
-                diskBTreeFactory, fmp, fieldCount, rtreeCmp, btreeCmp);
+                rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
+                diskRTreeFactory, diskBTreeFactory, diskFileMapProvider, fieldCount, rtreeCmp, btreeCmp);
 
-        lsmRTree.create(fileId);
-        lsmRTree.open(fileId);
+        lsmRTree.create(getFileId());
+        lsmRTree.open(getFileId());
 
         ByteBuffer frame = ctx.allocateFrame();
         FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
@@ -154,15 +142,163 @@
         @SuppressWarnings("rawtypes")
         ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
                 DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
-                DoubleSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
         RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
-
         IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
         accessor.reset(frame);
-
         FrameTupleReference tuple = new FrameTupleReference();
 
-        ITreeIndexAccessor lsmTreeAccessor = lsmRTree.createAccessor();
+        ITreeIndexAccessor indexAccessor = lsmRTree.createAccessor();
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        Random rnd2 = new Random();
+        rnd2.setSeed(50);
+        for (int i = 0; i < 5000; i++) {
+
+            double p1x = rnd.nextDouble();
+            double p1y = rnd.nextDouble();
+            double p2x = rnd.nextDouble();
+            double p2y = rnd.nextDouble();
+
+            double pk1 = rnd2.nextDouble();
+            int pk2 = rnd2.nextInt();
+            double pk3 = rnd2.nextDouble();
+
+            tb.reset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+                }
+            }
+
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
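+                // Ignore TreeIndexException (e.g., a duplicate entry); this test
+                // does not verify the outcome of individual inserts.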
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        lsmRTree.close();
+        memBufferCache.close();
+
+    }
+
+    // create an LSM-RTree of two dimensions
+    // fill the tree with random values using insertions
+    // and then delete all the tuples, which results in an empty LSM-RTree
+    @Test
+    public void test02() throws Exception {
+
+        // declare r-tree keys
+        int rtreeKeyFieldCount = 4;
+        IBinaryComparator[] rtreeCmps = new IBinaryComparator[rtreeKeyFieldCount];
+        rtreeCmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
+        rtreeCmps[1] = rtreeCmps[0];
+        rtreeCmps[2] = rtreeCmps[0];
+        rtreeCmps[3] = rtreeCmps[0];
+
+        // declare b-tree keys
+        int btreeKeyFieldCount = 7;
+        IBinaryComparator[] btreeCmps = new IBinaryComparator[btreeKeyFieldCount];
+        btreeCmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
+        btreeCmps[1] = btreeCmps[0];
+        btreeCmps[2] = btreeCmps[0];
+        btreeCmps[3] = btreeCmps[0];
+        btreeCmps[4] = btreeCmps[0];
+        btreeCmps[5] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        btreeCmps[6] = btreeCmps[0];
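+        // All seven fields participate in the BTree keys, i.e., the BTree component
+        // keys on the entire tuple.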
+
+        // declare tuple fields
+        int fieldCount = 7;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = DoublePointable.TYPE_TRAITS;
+        typeTraits[1] = DoublePointable.TYPE_TRAITS;
+        typeTraits[2] = DoublePointable.TYPE_TRAITS;
+        typeTraits[3] = DoublePointable.TYPE_TRAITS;
+        typeTraits[4] = DoublePointable.TYPE_TRAITS;
+        typeTraits[5] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[6] = DoublePointable.TYPE_TRAITS;
+
+        MultiComparator rtreeCmp = new MultiComparator(rtreeCmps);
+        MultiComparator btreeCmp = new MultiComparator(btreeCmps);
+
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                rtreeCmps.length, DoublePointable.FACTORY);
+
+        LSMTypeAwareTupleWriterFactory rtreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+        LSMTypeAwareTupleWriterFactory btreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+        ITreeIndexFrameFactory rtreeInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(rtreeTupleWriterFactory,
+                valueProviderFactories);
+        ITreeIndexFrameFactory rtreeLeafFrameFactory = new RTreeNSMLeafFrameFactory(rtreeTupleWriterFactory,
+                valueProviderFactories);
+
+        ITreeIndexFrameFactory btreeInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(btreeTupleWriterFactory);
+        ITreeIndexFrameFactory btreeLeafFrameFactory = new BTreeNSMLeafFrameFactory(btreeTupleWriterFactory);
+
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        InMemoryFreePageManager memFreePageManager = new LSMRTreeInMemoryFreePageManager(1000, metaFrameFactory);
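+        // Assumption: the first constructor argument (1000) is the page budget of the
+        // in-memory component, as in the other tests in this file.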
+
+        LinkedListFreePageManagerFactory freePageManagerFactory = new LinkedListFreePageManagerFactory(diskBufferCache,
+                metaFrameFactory);
+
+        RTreeFactory diskRTreeFactory = new RTreeFactory(diskBufferCache, freePageManagerFactory, rtreeCmp, fieldCount,
+                rtreeInteriorFrameFactory, rtreeLeafFrameFactory);
+        BTreeFactory diskBTreeFactory = new BTreeFactory(diskBufferCache, freePageManagerFactory, btreeCmp, fieldCount,
+                btreeInteriorFrameFactory, btreeLeafFrameFactory);
+
+        ILSMFileNameManager fileNameManager = new LSMTreeFileNameManager(onDiskDir);
+        LSMRTree lsmRTree = new LSMRTree(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory,
+                rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
+                diskRTreeFactory, diskBTreeFactory, diskFileMapProvider, fieldCount, rtreeCmp, btreeCmp);
+
+        lsmRTree.create(getFileId());
+        lsmRTree.open(getFileId());
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        @SuppressWarnings("rawtypes")
+        ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        ITreeIndexAccessor indexAccessor = lsmRTree.createAccessor();
 
         Random rnd = new Random();
         rnd.setSeed(50);
@@ -174,7 +310,9 @@
             double p2x = rnd.nextDouble();
             double p2y = rnd.nextDouble();
 
-            int pk = rnd.nextInt();
+            double pk1 = rnd.nextDouble();
+            int pk2 = rnd.nextInt();
+            double pk3 = rnd.nextDouble();
 
             tb.reset();
             DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
@@ -185,7 +323,11 @@
             tb.addFieldEndOffset();
             DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
             tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(pk, dos);
+            DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
             tb.addFieldEndOffset();
 
             appender.reset(frame, true);
@@ -194,82 +336,20 @@
             tuple.reset(accessor, 0);
 
             if (LOGGER.isLoggable(Level.INFO)) {
-                // if (i % 1000 == 0) {
-                LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
-                        + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
-                // }
+                if (i % 1000 == 0) {
+                    LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+                }
             }
 
-            if (i == 1691) {
-                System.out.println();
-            }
             try {
-                lsmTreeAccessor.insert(tuple);
+                indexAccessor.insert(tuple);
             } catch (TreeIndexException e) {
             } catch (Exception e) {
                 e.printStackTrace();
             }
         }
 
-        for (int i = 0; i < 50; i++) {
-            double p1x = rnd.nextDouble();
-            double p1y = rnd.nextDouble();
-            double p2x = rnd.nextDouble();
-            double p2y = rnd.nextDouble();
-
-            int pk = rnd.nextInt();
-
-            tb.reset();
-            DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
-            tb.addFieldEndOffset();
-            DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
-            tb.addFieldEndOffset();
-            DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
-            tb.addFieldEndOffset();
-            DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
-            tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(pk, dos);
-            tb.addFieldEndOffset();
-
-            appender.reset(frame, true);
-            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
-
-            tuple.reset(accessor, 0);
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(i + " Searching for: " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
-                        + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
-            }
-
-            ITreeIndexCursor searchCursor = new LSMRTreeSearchCursor();
-            SearchPredicate searchPredicate = new SearchPredicate(tuple, rtreeCmp);
-
-            lsmTreeAccessor.search(searchCursor, searchPredicate);
-
-            ArrayList<Integer> results = new ArrayList<Integer>();
-            try {
-                while (searchCursor.hasNext()) {
-                    searchCursor.next();
-                    ITupleReference frameTuple = searchCursor.getTuple();
-                    ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(4),
-                            frameTuple.getFieldStart(4), frameTuple.getFieldLength(4));
-                    DataInput dataIn = new DataInputStream(inStream);
-                    Integer res = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
-                    results.add(res);
-                    if (results.size() == 1632) {
-                        System.out.println();
-                    }
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            } finally {
-                searchCursor.close();
-            }
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("There are " + results.size() + " objects that satisfy the query");
-            }
-        }
-
         rnd.setSeed(50);
         for (int i = 0; i < 5000; i++) {
 
@@ -278,7 +358,9 @@
             double p2x = rnd.nextDouble();
             double p2y = rnd.nextDouble();
 
-            int pk = rnd.nextInt();
+            double pk1 = rnd.nextDouble();
+            int pk2 = rnd.nextInt();
+            double pk3 = rnd.nextDouble();
 
             tb.reset();
             DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
@@ -289,7 +371,11 @@
             tb.addFieldEndOffset();
             DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
             tb.addFieldEndOffset();
-            IntegerSerializerDeserializer.INSTANCE.serialize(pk, dos);
+            DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
             tb.addFieldEndOffset();
 
             appender.reset(frame, true);
@@ -298,14 +384,14 @@
             tuple.reset(accessor, 0);
 
             if (LOGGER.isLoggable(Level.INFO)) {
-                // if (i % 1000 == 0) {
-                LOGGER.info("DELETING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
-                        + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
-                // }
+                if (i % 1000 == 0) {
+                    LOGGER.info("DELETING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+                }
             }
 
             try {
-                lsmTreeAccessor.delete(tuple);
+                indexAccessor.delete(tuple);
             } catch (TreeIndexException e) {
             } catch (Exception e) {
                 e.printStackTrace();
@@ -313,7 +399,314 @@
         }
 
         lsmRTree.close();
-        bufferCache.closeFile(fileId);
         memBufferCache.close();
+
     }
-}
+
+    // create an LSM-RTree of three dimensions
+    // fill the tree with random values using insertions
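+    // Tuple layout used below: fields 0-5 hold the 3-D MBR (min x, min y, min z,
+    // max x, max y, max z) as doubles; fields 6-8 hold a (double, int, double) payload.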
+    @Test
+    public void test03() throws Exception {
+
+        // declare r-tree keys
+        int rtreeKeyFieldCount = 6;
+        IBinaryComparator[] rtreeCmps = new IBinaryComparator[rtreeKeyFieldCount];
+        rtreeCmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
+        rtreeCmps[1] = rtreeCmps[0];
+        rtreeCmps[2] = rtreeCmps[0];
+        rtreeCmps[3] = rtreeCmps[0];
+        rtreeCmps[4] = rtreeCmps[0];
+        rtreeCmps[5] = rtreeCmps[0];
+
+        // declare b-tree keys
+        int btreeKeyFieldCount = 9;
+        IBinaryComparator[] btreeCmps = new IBinaryComparator[btreeKeyFieldCount];
+        btreeCmps[0] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
+        btreeCmps[1] = btreeCmps[0];
+        btreeCmps[2] = btreeCmps[0];
+        btreeCmps[3] = btreeCmps[0];
+        btreeCmps[4] = btreeCmps[0];
+        btreeCmps[5] = btreeCmps[0];
+        btreeCmps[6] = btreeCmps[0];
+        btreeCmps[7] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        btreeCmps[8] = btreeCmps[0];
+
+        // declare tuple fields
+        int fieldCount = 9;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = DoublePointable.TYPE_TRAITS;
+        typeTraits[1] = DoublePointable.TYPE_TRAITS;
+        typeTraits[2] = DoublePointable.TYPE_TRAITS;
+        typeTraits[3] = DoublePointable.TYPE_TRAITS;
+        typeTraits[4] = DoublePointable.TYPE_TRAITS;
+        typeTraits[5] = DoublePointable.TYPE_TRAITS;
+        typeTraits[6] = DoublePointable.TYPE_TRAITS;
+        typeTraits[7] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[8] = DoublePointable.TYPE_TRAITS;
+
+        MultiComparator rtreeCmp = new MultiComparator(rtreeCmps);
+        MultiComparator btreeCmp = new MultiComparator(btreeCmps);
+
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                rtreeCmps.length, DoublePointable.FACTORY);
+
+        LSMTypeAwareTupleWriterFactory rtreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+        LSMTypeAwareTupleWriterFactory btreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+        ITreeIndexFrameFactory rtreeInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(rtreeTupleWriterFactory,
+                valueProviderFactories);
+        ITreeIndexFrameFactory rtreeLeafFrameFactory = new RTreeNSMLeafFrameFactory(rtreeTupleWriterFactory,
+                valueProviderFactories);
+
+        ITreeIndexFrameFactory btreeInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(btreeTupleWriterFactory);
+        ITreeIndexFrameFactory btreeLeafFrameFactory = new BTreeNSMLeafFrameFactory(btreeTupleWriterFactory);
+
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        InMemoryFreePageManager memFreePageManager = new LSMRTreeInMemoryFreePageManager(1000, metaFrameFactory);
+
+        LinkedListFreePageManagerFactory freePageManagerFactory = new LinkedListFreePageManagerFactory(diskBufferCache,
+                metaFrameFactory);
+
+        RTreeFactory diskRTreeFactory = new RTreeFactory(diskBufferCache, freePageManagerFactory, rtreeCmp, fieldCount,
+                rtreeInteriorFrameFactory, rtreeLeafFrameFactory);
+        BTreeFactory diskBTreeFactory = new BTreeFactory(diskBufferCache, freePageManagerFactory, btreeCmp, fieldCount,
+                btreeInteriorFrameFactory, btreeLeafFrameFactory);
+
+        ILSMFileNameManager fileNameManager = new LSMTreeFileNameManager(onDiskDir);
+        LSMRTree lsmRTree = new LSMRTree(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory,
+                rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
+                diskRTreeFactory, diskBTreeFactory, diskFileMapProvider, fieldCount, rtreeCmp, btreeCmp);
+
+        lsmRTree.create(getFileId());
+        lsmRTree.open(getFileId());
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        @SuppressWarnings("rawtypes")
+        ISerializerDeserializer[] recDescSers = { DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                DoubleSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        ITreeIndexAccessor indexAccessor = lsmRTree.createAccessor();
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        for (int i = 0; i < 5000; i++) {
+
+            double p1x = rnd.nextDouble();
+            double p1y = rnd.nextDouble();
+            double p1z = rnd.nextDouble();
+            double p2x = rnd.nextDouble();
+            double p2y = rnd.nextDouble();
+            double p2z = rnd.nextDouble();
+
+            double pk1 = rnd.nextDouble();
+            int pk2 = rnd.nextInt();
+            double pk3 = rnd.nextDouble();
+
+            tb.reset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.min(p1z, p2z), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(Math.max(p1z, p2z), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.min(p1z, p2z) + " " + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y) + " "
+                            + Math.max(p1z, p2z));
+                }
+            }
+
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        lsmRTree.close();
+        memBufferCache.close();
+
+    }
+
+    // create an LSM-RTree of two dimensions
+    // fill the tree with random integer key values using insertions
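+    // Tuple layout used below: fields 0-3 hold the 2-D MBR as integers; fields 4-6
+    // hold a (double, int, double) payload.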
+    @Test
+    public void test04() throws Exception {
+
+        // declare r-tree keys
+        int rtreeKeyFieldCount = 4;
+        IBinaryComparator[] rtreeCmps = new IBinaryComparator[rtreeKeyFieldCount];
+        rtreeCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        rtreeCmps[1] = rtreeCmps[0];
+        rtreeCmps[2] = rtreeCmps[0];
+        rtreeCmps[3] = rtreeCmps[0];
+
+        // declare b-tree keys
+        int btreeKeyFieldCount = 7;
+        IBinaryComparator[] btreeCmps = new IBinaryComparator[btreeKeyFieldCount];
+        btreeCmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+        btreeCmps[1] = btreeCmps[0];
+        btreeCmps[2] = btreeCmps[0];
+        btreeCmps[3] = btreeCmps[0];
+        btreeCmps[4] = PointableBinaryComparatorFactory.of(DoublePointable.FACTORY).createBinaryComparator();
+        btreeCmps[5] = btreeCmps[0];
+        btreeCmps[6] = btreeCmps[4];
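+        // The comparators mirror the type traits declared below: integer for the MBR
+        // fields and field 5, double for fields 4 and 6.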
+
+        // declare tuple fields
+        int fieldCount = 7;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[3] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[4] = DoublePointable.TYPE_TRAITS;
+        typeTraits[5] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[6] = DoublePointable.TYPE_TRAITS;
+
+        MultiComparator rtreeCmp = new MultiComparator(rtreeCmps);
+        MultiComparator btreeCmp = new MultiComparator(btreeCmps);
+
+        // create value providers
+        IPrimitiveValueProviderFactory[] valueProviderFactories = RTreeUtils.createPrimitiveValueProviderFactories(
+                rtreeCmps.length, IntegerPointable.FACTORY);
+
+        LSMTypeAwareTupleWriterFactory rtreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+        LSMTypeAwareTupleWriterFactory btreeTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+        ITreeIndexFrameFactory rtreeInteriorFrameFactory = new RTreeNSMInteriorFrameFactory(rtreeTupleWriterFactory,
+                valueProviderFactories);
+        ITreeIndexFrameFactory rtreeLeafFrameFactory = new RTreeNSMLeafFrameFactory(rtreeTupleWriterFactory,
+                valueProviderFactories);
+
+        ITreeIndexFrameFactory btreeInteriorFrameFactory = new BTreeNSMInteriorFrameFactory(btreeTupleWriterFactory);
+        ITreeIndexFrameFactory btreeLeafFrameFactory = new BTreeNSMLeafFrameFactory(btreeTupleWriterFactory);
+
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        InMemoryFreePageManager memFreePageManager = new LSMRTreeInMemoryFreePageManager(1000, metaFrameFactory);
+
+        LinkedListFreePageManagerFactory freePageManagerFactory = new LinkedListFreePageManagerFactory(diskBufferCache,
+                metaFrameFactory);
+
+        RTreeFactory diskRTreeFactory = new RTreeFactory(diskBufferCache, freePageManagerFactory, rtreeCmp, fieldCount,
+                rtreeInteriorFrameFactory, rtreeLeafFrameFactory);
+        BTreeFactory diskBTreeFactory = new BTreeFactory(diskBufferCache, freePageManagerFactory, btreeCmp, fieldCount,
+                btreeInteriorFrameFactory, btreeLeafFrameFactory);
+
+        ILSMFileNameManager fileNameManager = new LSMTreeFileNameManager(onDiskDir);
+        LSMRTree lsmRTree = new LSMRTree(memBufferCache, memFreePageManager, rtreeInteriorFrameFactory,
+                rtreeLeafFrameFactory, btreeInteriorFrameFactory, btreeLeafFrameFactory, fileNameManager,
+                diskRTreeFactory, diskBTreeFactory, diskFileMapProvider, fieldCount, rtreeCmp, btreeCmp);
+
+        lsmRTree.create(getFileId());
+        lsmRTree.open(getFileId());
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        @SuppressWarnings("rawtypes")
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, DoubleSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        ITreeIndexAccessor indexAccessor = lsmRTree.createAccessor();
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        Random rnd2 = new Random();
+        rnd2.setSeed(50);
+        for (int i = 0; i < 5000; i++) {
+
+            int p1x = rnd.nextInt();
+            int p1y = rnd.nextInt();
+            int p2x = rnd.nextInt();
+            int p2y = rnd.nextInt();
+
+            double pk1 = rnd2.nextDouble();
+            int pk2 = rnd2.nextInt();
+            double pk3 = rnd2.nextDouble();
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(Math.min(p1x, p2x), dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(Math.min(p1y, p2y), dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(Math.max(p1x, p2x), dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(Math.max(p1y, p2y), dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(pk1, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(pk2, dos);
+            tb.addFieldEndOffset();
+            DoubleSerializerDeserializer.INSTANCE.serialize(pk3, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                if (i % 1000 == 0) {
+                    LOGGER.info("INSERTING " + i + " " + Math.min(p1x, p2x) + " " + Math.min(p1y, p2y) + " "
+                            + Math.max(p1x, p2x) + " " + Math.max(p1y, p2y));
+                }
+            }
+
+            try {
+                indexAccessor.insert(tuple);
+            } catch (TreeIndexException e) {
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+        lsmRTree.close();
+        memBufferCache.close();
+
+    }
+}
\ No newline at end of file