Merged r289:290 from the hyracks_io_management branch

git-svn-id: https://hyracks.googlecode.com/svn/trunk/hyracks@291 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks-tests/.project b/hyracks-tests/.project
new file mode 100644
index 0000000..198463d
--- /dev/null
+++ b/hyracks-tests/.project
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>hyracks-tests</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.maven.ide.eclipse.maven2Builder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.maven.ide.eclipse.maven2Nature</nature>
+	</natures>
+</projectDescription>
diff --git a/hyracks-tests/.settings/org.maven.ide.eclipse.prefs b/hyracks-tests/.settings/org.maven.ide.eclipse.prefs
new file mode 100644
index 0000000..99b89a6
--- /dev/null
+++ b/hyracks-tests/.settings/org.maven.ide.eclipse.prefs
@@ -0,0 +1,9 @@
+#Thu Jan 06 11:27:16 PST 2011
+activeProfiles=
+eclipse.preferences.version=1
+fullBuildGoals=process-test-resources
+includeModules=false
+resolveWorkspaceProjects=true
+resourceFilterGoals=process-resources resources\:testResources
+skipCompilerPlugin=true
+version=1
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/.classpath b/hyracks-tests/hyracks-storage-am-btree-test/.classpath
new file mode 100644
index 0000000..e44aa2f
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="src" path="src/test/java"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
+	<classpathentry kind="output" path="target/classes"/>
+</classpath>
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/.project b/hyracks-tests/hyracks-storage-am-btree-test/.project
new file mode 100644
index 0000000..bc6bc56
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/.project
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>hyracks-storage-am-btree-test</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.eclipse.jdt.core.javabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>org.maven.ide.eclipse.maven2Builder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+		<nature>org.maven.ide.eclipse.maven2Nature</nature>
+	</natures>
+</projectDescription>
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.eclipse.jdt.core.prefs b/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..3cd389e
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,6 @@
+#Thu Jan 06 11:27:16 PST 2011
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
+org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.maven.ide.eclipse.prefs b/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.maven.ide.eclipse.prefs
new file mode 100644
index 0000000..99b89a6
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/.settings/org.maven.ide.eclipse.prefs
@@ -0,0 +1,9 @@
+#Thu Jan 06 11:27:16 PST 2011
+activeProfiles=
+eclipse.preferences.version=1
+fullBuildGoals=process-test-resources
+includeModules=false
+resolveWorkspaceProjects=true
+resourceFilterGoals=process-resources resources\:testResources
+skipCompilerPlugin=true
+version=1
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/pom.xml b/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
new file mode 100644
index 0000000..7e78c2a
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/pom.xml
@@ -0,0 +1,55 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-am-btree-test</artifactId>
+  <version>0.1.4-SNAPSHOT</version>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-tests</artifactId>
+    <version>0.1.4-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.1</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-nc</artifactId>
+  		<version>0.1.4-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-btree</artifactId>
+  		<version>0.1.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-test-support</artifactId>
+  		<version>0.1.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
new file mode 100644
index 0000000..0798c22
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeFieldPrefixNSMTest.java
@@ -0,0 +1,246 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeTupleWriter;
+import edu.uci.ics.hyracks.storage.am.btree.api.IPrefixSlotManager;
+import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.impls.FieldPrefixSlotManager;
+import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriter;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class BTreeFieldPrefixNSMTest {
+
+    private static final int PAGE_SIZE = 32768; // 32K
+    private static final int NUM_PAGES = 40;
+    private static final int HYRACKS_FRAME_SIZE = 128;
+
+    private String tmpDir = System.getProperty("java.io.tmpdir");
+
+    // to help with the logger madness
+    private void print(String str) {
+        System.out.print(str);
+
+        // if(GlobalConfig.ASTERIX_LOGGER.isLoggable(Level.FINEST)) {
+        // GlobalConfig.ASTERIX_LOGGER.finest(str);
+        // }
+    }
+
+    public class BufferAllocator implements ICacheMemoryAllocator {
+        @Override
+        public ByteBuffer[] allocate(int pageSize, int numPages) {
+            ByteBuffer[] buffers = new ByteBuffer[numPages];
+            for (int i = 0; i < numPages; ++i) {
+                buffers[i] = ByteBuffer.allocate(pageSize);
+            }
+            return buffers;
+        }
+    }
+
+    private ITupleReference createTuple(IHyracksStageletContext ctx, int f0, int f1, int f2, boolean print)
+            throws HyracksDataException {
+        if (print)
+            System.out.println("CREATING: " + f0 + " " + f1 + " " + f2);
+
+        ByteBuffer buf = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(3);
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(buf);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        tb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+        tb.addFieldEndOffset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+        tb.addFieldEndOffset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+        tb.addFieldEndOffset();
+
+        appender.reset(buf, true);
+        appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+        tuple.reset(accessor, 0);
+
+        return tuple;
+    }
+
+    @Test
+    public void test01() throws Exception {
+        IHyracksStageletContext stageletCtx = TestUtils.create(HYRACKS_FRAME_SIZE);
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(stageletCtx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(stageletCtx);
+        FileReference file = new FileReference(new File(tmpDir + "/" + "btreetest.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare fields
+        int fieldCount = 3;
+        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+        typeTraits[0] = new TypeTrait(4);
+        typeTraits[1] = new TypeTrait(4);
+        typeTraits[2] = new TypeTrait(4);
+
+        // declare keys
+        int keyFieldCount = 3;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[2] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+        // just for printing
+        ISerializerDeserializer[] sers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, 0), false);
+        try {
+
+            IPrefixSlotManager slotManager = new FieldPrefixSlotManager();
+            IBTreeTupleWriter tupleWriter = new TypeAwareTupleWriter(typeTraits);
+            FieldPrefixNSMLeafFrame frame = new FieldPrefixNSMLeafFrame(tupleWriter);
+            frame.setPage(page);
+            frame.initBuffer((byte) 0);
+            slotManager.setFrame(frame);
+            frame.setPrefixTupleCount(0);
+
+            String before = "";
+            String after = "";
+
+            int compactFreq = 5;
+            int compressFreq = 5;
+            int smallMax = 10;
+            int numRecords = 1000;
+
+            int[][] savedFields = new int[numRecords][3];
+
+            // insert records with random calls to compact and compress
+            for (int i = 0; i < numRecords; i++) {
+
+                if ((i + 1) % 100 == 0)
+                    print("INSERTING " + (i + 1) + " / " + numRecords + "\n");
+
+                int a = rnd.nextInt() % smallMax;
+                int b = rnd.nextInt() % smallMax;
+                int c = i;
+
+                ITupleReference tuple = createTuple(stageletCtx, a, b, c, false);
+                try {
+                    frame.insert(tuple, cmp);
+                } catch (BTreeException e) {
+                    e.printStackTrace();
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+
+                savedFields[i][0] = a;
+                savedFields[i][1] = b;
+                savedFields[i][2] = c;
+
+                if (rnd.nextInt() % compactFreq == 0) {
+                    before = frame.printKeys(cmp, sers);
+                    frame.compact(cmp);
+                    after = frame.printKeys(cmp, sers);
+                    Assert.assertEquals(before, after);
+                }
+
+                if (rnd.nextInt() % compressFreq == 0) {
+                    before = frame.printKeys(cmp, sers);
+                    frame.compress(cmp);
+                    after = frame.printKeys(cmp, sers);
+                    Assert.assertEquals(before, after);
+                }
+
+            }
+
+            // delete records with random calls to compact and compress
+            for (int i = 0; i < numRecords; i++) {
+
+                if ((i + 1) % 100 == 0)
+                    print("DELETING " + (i + 1) + " / " + numRecords + "\n");
+
+                ITupleReference tuple = createTuple(stageletCtx, savedFields[i][0], savedFields[i][1],
+                        savedFields[i][2], false);
+                try {
+                    frame.delete(tuple, cmp, true);
+                } catch (Exception e) {
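+                    // ignored: this delete may fail for a tuple whose earlier insert did not succeed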
+                }
+
+                if (rnd.nextInt() % compactFreq == 0) {
+                    before = frame.printKeys(cmp, sers);
+                    frame.compact(cmp);
+                    after = frame.printKeys(cmp, sers);
+                    Assert.assertEquals(before, after);
+                }
+
+                if (rnd.nextInt() % compressFreq == 0) {
+                    before = frame.printKeys(cmp, sers);
+                    frame.compress(cmp);
+                    after = frame.printKeys(cmp, sers);
+                    Assert.assertEquals(before, after);
+                }
+            }
+
+        } finally {
+            bufferCache.unpin(page);
+        }
+
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+    }
+}
\ No newline at end of file
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
new file mode 100644
index 0000000..e725f7c
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/BTreeTest.java
@@ -0,0 +1,1233 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.Random;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.MetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.btree.impls.DiskOrderScanCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.tuples.SimpleTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+@SuppressWarnings("unchecked")
+public class BTreeTest {
+
+    private static final int PAGE_SIZE = 256;
+    private static final int NUM_PAGES = 10;
+    private static final int HYRACKS_FRAME_SIZE = 128;
+
+    private String tmpDir = System.getProperty("java.io.tmpdir");
+    IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+    // to help with the logger madness
+    private void print(String str) {
+        System.out.print(str);
+
+        // if(GlobalConfig.ASTERIX_LOGGER.isLoggable(Level.FINEST)) {
+        // GlobalConfig.ASTERIX_LOGGER.finest(str);
+        // }
+    }
+
+    public class BufferAllocator implements ICacheMemoryAllocator {
+        @Override
+        public ByteBuffer[] allocate(int pageSize, int numPages) {
+            ByteBuffer[] buffers = new ByteBuffer[numPages];
+            for (int i = 0; i < numPages; ++i) {
+                buffers[i] = ByteBuffer.allocate(pageSize);
+            }
+            return buffers;
+        }
+    }
+
+    // FIXED-LENGTH KEY TEST
+    // create a B-tree with one fixed-length "key" field and one fixed-length
+    // "value" field
+    // fill B-tree with random values using insertions (not bulk load)
+    // perform ordered scan and range search
+    @Test
+    public void test01() throws Exception {
+
+        print("FIXED-LENGTH KEY TEST\n");
+
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(tmpDir + "/" + "btreetest.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare fields
+        int fieldCount = 2;
+        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+        typeTraits[0] = new TypeTrait(4);
+        typeTraits[1] = new TypeTrait(4);
+
+        // declare keys
+        int keyFieldCount = 1;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        // SimpleTupleWriterFactory tupleWriterFactory = new
+        // SimpleTupleWriterFactory();
+        IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+        IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+        IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+        IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+        IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+        IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+        BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+        btree.create(fileId, leafFrame, metaFrame);
+        btree.open(fileId);
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        long start = System.currentTimeMillis();
+
+        print("INSERTING INTO TREE\n");
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+        // insert 10000 records
+        for (int i = 0; i < 10000; i++) {
+
+            int f0 = rnd.nextInt() % 10000;
+            int f1 = 5;
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            // System.out.println(tuple.getFieldCount() + " " +
+            // tuple.getFieldLength(0) + " " + tuple.getFieldLength(1));
+
+            if (i % 1000 == 0) {
+                long end = System.currentTimeMillis();
+                print("INSERTING " + i + " : " + f0 + " " + f1 + " " + (end - start) + "\n");
+            }
+
+            try {
+                btree.insert(tuple, insertOpCtx);
+            } catch (BTreeException e) {
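+                // likely a duplicate key (f0 repeats in this random workload); intentionally ignored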
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+
+            // btree.printTree(leafFrame, interiorFrame);
+            // System.out.println();
+        }
+        // btree.printTree(leafFrame, interiorFrame);
+        // System.out.println();
+
+        int maxPage = btree.getMaxPage(metaFrame);
+        System.out.println("MAXPAGE: " + maxPage);
+
+        String stats = btree.printStats();
+        print(stats);
+
+        long end = System.currentTimeMillis();
+        long duration = end - start;
+        print("DURATION: " + duration + "\n");
+
+        // ordered scan
+
+        print("ORDERED SCAN:\n");
+        IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+        btree.search(scanCursor, nullPred, searchOpCtx);
+        try {
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference frameTuple = scanCursor.getTuple();
+                String rec = cmp.printTuple(frameTuple, recDescSers);
+                print(rec + "\n");
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            scanCursor.close();
+        }
+
+        // disk-order scan
+        print("DISK-ORDER SCAN:\n");
+        DiskOrderScanCursor diskOrderCursor = new DiskOrderScanCursor(leafFrame);
+        btree.diskOrderScan(diskOrderCursor, leafFrame, metaFrame);
+        try {
+            while (diskOrderCursor.hasNext()) {
+                diskOrderCursor.next();
+                ITupleReference frameTuple = diskOrderCursor.getTuple();
+                String rec = cmp.printTuple(frameTuple, recDescSers);
+                print(rec + "\n");
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            diskOrderCursor.close();
+        }
+
+        // range search in [-1000, 1000]
+        print("RANGE SEARCH:\n");
+
+        IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+
+        // build low and high keys
+        ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+        DataOutput kdos = ktb.getDataOutput();
+
+        ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+        keyAccessor.reset(frame);
+
+        appender.reset(frame, true);
+
+        // build and append low key
+        ktb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(-1000, kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // build and append high key
+        ktb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(1000, kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // create tuple references for the search keys
+        FrameTupleReference lowKey = new FrameTupleReference();
+        lowKey.reset(keyAccessor, 0);
+
+        FrameTupleReference highKey = new FrameTupleReference();
+        highKey.reset(keyAccessor, 1);
+
+        IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+        searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+
+        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+        btree.search(rangeCursor, rangePred, searchOpCtx);
+
+        try {
+            while (rangeCursor.hasNext()) {
+                rangeCursor.next();
+                ITupleReference frameTuple = rangeCursor.getTuple();
+                String rec = cmp.printTuple(frameTuple, recDescSers);
+                print(rec + "\n");
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            rangeCursor.close();
+        }
+
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+
+        print("\n");
+    }
+
+    // COMPOSITE KEY TEST (NON-UNIQUE B-TREE)
+    // create a B-tree with two fixed-length "key" fields and one
+    // fixed-length "value" field
+    // fill B-tree with random values using insertions (not bulk load)
+    // perform ordered scan and range search
+    @Test
+    public void test02() throws Exception {
+
+        print("COMPOSITE KEY TEST\n");
+
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(tmpDir + "/" + "btreetest.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare fields
+        int fieldCount = 3;
+        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+        typeTraits[0] = new TypeTrait(4);
+        typeTraits[1] = new TypeTrait(4);
+        typeTraits[2] = new TypeTrait(4);
+
+        // declare keys
+        int keyFieldCount = 2;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        // SimpleTupleWriterFactory tupleWriterFactory = new
+        // SimpleTupleWriterFactory();
+        IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+        IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+        IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+        IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+        IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+        IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+        BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+        btree.create(fileId, leafFrame, metaFrame);
+        btree.open(fileId);
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        long start = System.currentTimeMillis();
+
+        print("INSERTING INTO TREE\n");
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+        for (int i = 0; i < 10000; i++) {
+            int f0 = rnd.nextInt() % 2000;
+            int f1 = rnd.nextInt() % 1000;
+            int f2 = 5;
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            if (i % 1000 == 0) {
+                print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
+            }
+
+            try {
+                btree.insert(tuple, insertOpCtx);
+            } catch (Exception e) {
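+                // insert failures (most likely duplicate composite keys) are ignored here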
+            }
+        }
+        // btree.printTree(leafFrame, interiorFrame);
+
+        long end = System.currentTimeMillis();
+        long duration = end - start;
+        print("DURATION: " + duration + "\n");
+
+        // try a simple index scan
+        print("ORDERED SCAN:\n");
+        IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+        btree.search(scanCursor, nullPred, searchOpCtx);
+
+        try {
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference frameTuple = scanCursor.getTuple();
+                String rec = cmp.printTuple(frameTuple, recDescSers);
+                print(rec + "\n");
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            scanCursor.close();
+        }
+
+        // range search in [(-3),(3)]
+        print("RANGE SEARCH:\n");
+        IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+
+        // build low and high keys
+        ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+        DataOutput kdos = ktb.getDataOutput();
+
+        ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+        keyAccessor.reset(frame);
+
+        appender.reset(frame, true);
+
+        // build and append low key
+        ktb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(-3, kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // build and append high key
+        ktb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(3, kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // create tuple references for the search keys
+        FrameTupleReference lowKey = new FrameTupleReference();
+        lowKey.reset(keyAccessor, 0);
+
+        FrameTupleReference highKey = new FrameTupleReference();
+        highKey.reset(keyAccessor, 1);
+
+        IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+        searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps); // use only a single comparator for searching
+
+        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+        btree.search(rangeCursor, rangePred, searchOpCtx);
+
+        try {
+            while (rangeCursor.hasNext()) {
+                rangeCursor.next();
+                ITupleReference frameTuple = rangeCursor.getTuple();
+                String rec = cmp.printTuple(frameTuple, recDescSers);
+                print(rec + "\n");
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            rangeCursor.close();
+        }
+
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+
+        print("\n");
+    }
+
+    // VARIABLE-LENGTH TEST
+    // create a B-tree with one variable-length "key" field and one
+    // variable-length "value" field
+    // fill B-tree with random values using insertions (not bulk load)
+    // perform ordered scan and range search
+    @Test
+    public void test03() throws Exception {
+
+        print("VARIABLE-LENGTH KEY TEST\n");
+
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(tmpDir + "/" + "btreetest.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare fields
+        int fieldCount = 2;
+        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+        typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+        typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+
+        // declare keys
+        int keyFieldCount = 1;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+        SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
+        // TypeAwareTupleWriterFactory tupleWriterFactory = new
+        // TypeAwareTupleWriterFactory(typeTraits);
+        IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+        IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+        IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+        IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+        IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+        IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+        BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+        btree.create(fileId, leafFrame, metaFrame);
+        btree.open(fileId);
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+        int maxLength = 10; // max string length to be generated
+        for (int i = 0; i < 10000; i++) {
+
+            String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+            String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+
+            tb.reset();
+            UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
+            tb.addFieldEndOffset();
+            UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            if (i % 1000 == 0) {
+                // print("INSERTING " + i + ": " + cmp.printRecord(record, 0) +
+                // "\n");
+                print("INSERTING " + i + "\n");
+            }
+
+            try {
+                btree.insert(tuple, insertOpCtx);
+            } catch (Exception e) {
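+                // most likely a duplicate key; ignored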
+                // e.printStackTrace();
+            }
+        }
+        // btree.printTree();
+
+        System.out.println("DONE INSERTING");
+
+        // ordered scan
+        print("ORDERED SCAN:\n");
+        IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+        btree.search(scanCursor, nullPred, searchOpCtx);
+
+        try {
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference frameTuple = scanCursor.getTuple();
+                String rec = cmp.printTuple(frameTuple, recDescSers);
+                print(rec + "\n");
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            scanCursor.close();
+        }
+
+        // range search in ["cbf", "cc7"]
+        print("RANGE SEARCH:\n");
+
+        IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+
+        // build low and high keys
+        ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+        DataOutput kdos = ktb.getDataOutput();
+
+        ISerializerDeserializer[] keyDescSers = { UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+        keyAccessor.reset(frame);
+
+        appender.reset(frame, true);
+
+        // build and append low key
+        ktb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("cbf", kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // build and append high key
+        ktb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize("cc7", kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // create tuple references for the search keys
+        FrameTupleReference lowKey = new FrameTupleReference();
+        lowKey.reset(keyAccessor, 0);
+
+        FrameTupleReference highKey = new FrameTupleReference();
+        highKey.reset(keyAccessor, 1);
+
+        IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+        searchCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+
+        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+        btree.search(rangeCursor, rangePred, searchOpCtx);
+
+        try {
+            while (rangeCursor.hasNext()) {
+                rangeCursor.next();
+                ITupleReference frameTuple = rangeCursor.getTuple();
+                String rec = cmp.printTuple(frameTuple, recDescSers);
+                print(rec + "\n");
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            rangeCursor.close();
+        }
+
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+
+        print("\n");
+    }
+
+    // DELETION TEST
+    // create a B-tree with one variable-length "key" field and one
+    // variable-length "value" field
+    // fill B-tree with random values using insertions, then delete entries
+    // one-by-one
+    // repeat procedure a few times on same B-tree
+    @Test
+    public void test04() throws Exception {
+
+        print("DELETION TEST\n");
+
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(tmpDir + "/" + "btreetest.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare fields
+        int fieldCount = 2;
+        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+        typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+        typeTraits[1] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+
+        // declare keys
+        int keyFieldCount = 1;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+        // SimpleTupleWriterFactory tupleWriterFactory = new
+        // SimpleTupleWriterFactory();
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+        IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+        IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+        IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+        IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+        IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+        BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+        btree.create(fileId, leafFrame, metaFrame);
+        btree.open(fileId);
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] recDescSers = { UTF8StringSerializerDeserializer.INSTANCE,
+                UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+        BTreeOpContext deleteOpCtx = btree.createOpContext(BTreeOp.BTO_DELETE, leafFrame, interiorFrame, metaFrame);
+
+        int runs = 3;
+        for (int run = 0; run < runs; run++) {
+
+            print("DELETION TEST RUN: " + (run + 1) + "/" + runs + "\n");
+
+            print("INSERTING INTO BTREE\n");
+            int maxLength = 10;
+            int ins = 10000;
+            String[] f0s = new String[ins];
+            String[] f1s = new String[ins];
+            int insDone = 0;
+            int[] insDoneCmp = new int[ins];
+            for (int i = 0; i < ins; i++) {
+                String f0 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+                String f1 = randomString(Math.abs(rnd.nextInt()) % maxLength + 1, rnd);
+
+                f0s[i] = f0;
+                f1s[i] = f1;
+
+                tb.reset();
+                UTF8StringSerializerDeserializer.INSTANCE.serialize(f0, dos);
+                tb.addFieldEndOffset();
+                UTF8StringSerializerDeserializer.INSTANCE.serialize(f1, dos);
+                tb.addFieldEndOffset();
+
+                appender.reset(frame, true);
+                appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+                tuple.reset(accessor, 0);
+
+                if (i % 1000 == 0) {
+                    print("INSERTING " + i + "\n");
+                    // print("INSERTING " + i + ": " + cmp.printRecord(record,
+                    // 0) + "\n");
+                }
+
+                try {
+                    btree.insert(tuple, insertOpCtx);
+                    insDone++;
+                } catch (BTreeException e) {
+                    // e.printStackTrace();
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+
+                insDoneCmp[i] = insDone;
+            }
+            // btree.printTree();
+            // btree.printStats();
+
+            print("DELETING FROM BTREE\n");
+            int delDone = 0;
+            for (int i = 0; i < ins; i++) {
+
+                tb.reset();
+                UTF8StringSerializerDeserializer.INSTANCE.serialize(f0s[i], dos);
+                tb.addFieldEndOffset();
+                UTF8StringSerializerDeserializer.INSTANCE.serialize(f1s[i], dos);
+                tb.addFieldEndOffset();
+
+                appender.reset(frame, true);
+                appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+                tuple.reset(accessor, 0);
+
+                if (i % 1000 == 0) {
+                    // print("DELETING " + i + ": " +
+                    // cmp.printRecord(records[i], 0) + "\n");
+                    print("DELETING " + i + "\n");
+                }
+
+                try {
+                    btree.delete(tuple, deleteOpCtx);
+                    delDone++;
+                } catch (BTreeException e) {
+                    // e.printStackTrace();
+                } catch (Exception e) {
+                    e.printStackTrace();
+                }
+
+                if (insDoneCmp[i] != delDone) {
+                    print("INCONSISTENT STATE, ERROR IN DELETION TEST\n");
+                    print("INSDONECMP: " + insDoneCmp[i] + " " + delDone + "\n");
+                    break;
+                }
+                // btree.printTree();
+            }
+            // btree.printTree(leafFrame, interiorFrame);
+
+            if (insDone != delDone) {
+                print("ERROR! INSDONE: " + insDone + " DELDONE: " + delDone);
+                break;
+            }
+        }
+
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+
+        print("\n");
+    }
+
+    // BULK LOAD TEST
+    // insert 100,000 records in bulk
+    // B-tree has a composite key to "simulate" non-unique index creation
+    // do range search
+    @Test
+    public void test05() throws Exception {
+
+        print("BULK LOAD TEST\n");
+
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(tmpDir + "/" + "btreetest.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare fields
+        int fieldCount = 3;
+        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+        typeTraits[0] = new TypeTrait(4);
+        typeTraits[1] = new TypeTrait(4);
+        typeTraits[2] = new TypeTrait(4);
+
+        // declare keys
+        int keyFieldCount = 2;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+        // SimpleTupleWriterFactory tupleWriterFactory = new
+        // SimpleTupleWriterFactory();
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+        IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+        IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+        IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+        IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+        IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+        BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+        btree.create(fileId, leafFrame, metaFrame);
+        btree.open(fileId);
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        BTree.BulkLoadContext bulkLoadCtx = btree.beginBulkLoad(0.7f, leafFrame, interiorFrame, metaFrame);
+
+        // generate sorted records
+        int ins = 100000;
+        print("BULK LOADING " + ins + " RECORDS\n");
+        long start = System.currentTimeMillis();
+        for (int i = 0; i < ins; i++) {
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(5, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            btree.bulkLoadAddTuple(bulkLoadCtx, tuple);
+        }
+
+        btree.endBulkLoad(bulkLoadCtx);
+
+        // btree.printTree(leafFrame, interiorFrame);
+
+        long end = System.currentTimeMillis();
+        long duration = end - start;
+        print("DURATION: " + duration + "\n");
+
+        // range search
+        print("RANGE SEARCH:\n");
+        IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+
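+        // the search uses only the first key field: range [44444, 44500], both bounds inclusive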
+        // build low and high keys
+        ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
+        DataOutput kdos = ktb.getDataOutput();
+
+        ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+        keyAccessor.reset(frame);
+
+        appender.reset(frame, true);
+
+        // build and append low key
+        ktb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(44444, kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // build and append high key
+        ktb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(44500, kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // create tuple references for the search keys
+        FrameTupleReference lowKey = new FrameTupleReference();
+        lowKey.reset(keyAccessor, 0);
+
+        FrameTupleReference highKey = new FrameTupleReference();
+        highKey.reset(keyAccessor, 1);
+
+        IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+        searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+
+        // TODO: check when searching backwards
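+        // RangePredicate arguments: isForward, lowKey, highKey, lowKeyInclusive, highKeyInclusive, plus the search comparators (passed twice here)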
+        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+        BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+        btree.search(rangeCursor, rangePred, searchOpCtx);
+
+        try {
+            while (rangeCursor.hasNext()) {
+                rangeCursor.next();
+                ITupleReference frameTuple = rangeCursor.getTuple();
+                String rec = cmp.printTuple(frameTuple, recDescSers);
+                print(rec + "\n");
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            rangeCursor.close();
+        }
+
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+
+        print("\n");
+    }
+
+    // TIME-INTERVAL INTERSECTION DEMO
+    // demo for Arjun showing easy support of intersection queries on time-intervals
+    @Test
+    public void test06() throws Exception {
+
+        print("TIME-INTERVAL INTERSECTION DEMO\n");
+
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(tmpDir + "/" + "btreetest.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare fields
+        int fieldCount = 3;
+        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+        typeTraits[0] = new TypeTrait(4);
+        typeTraits[1] = new TypeTrait(4);
+        typeTraits[2] = new TypeTrait(4);
+
+        // declare keys
+        int keyFieldCount = 2;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+        // SimpleTupleWriterFactory tupleWriterFactory = new
+        // SimpleTupleWriterFactory();
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+        IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+        IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+        IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+        IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+        IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+        BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+        btree.create(fileId, leafFrame, metaFrame);
+        btree.open(fileId);
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE, IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        long start = System.currentTimeMillis();
+
+        int intervalCount = 10;
+        int[][] intervals = new int[intervalCount][2];
+
+        intervals[0][0] = 10;
+        intervals[0][1] = 20;
+
+        intervals[1][0] = 11;
+        intervals[1][1] = 20;
+
+        intervals[2][0] = 12;
+        intervals[2][1] = 20;
+
+        intervals[3][0] = 13;
+        intervals[3][1] = 20;
+
+        intervals[4][0] = 14;
+        intervals[4][1] = 20;
+
+        intervals[5][0] = 20;
+        intervals[5][1] = 30;
+
+        intervals[6][0] = 20;
+        intervals[6][1] = 31;
+
+        intervals[7][0] = 20;
+        intervals[7][1] = 32;
+
+        intervals[8][0] = 20;
+        intervals[8][1] = 33;
+
+        intervals[9][0] = 20;
+        intervals[9][1] = 35;
+
+        BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+        // int exceptionCount = 0;
+        for (int i = 0; i < intervalCount; i++) {
+            int f0 = intervals[i][0];
+            int f1 = intervals[i][1];
+            int f2 = rnd.nextInt() % 100;
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f2, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            // print("INSERTING " + i + " : " + f0 + " " + f1 + "\n");
+            print("INSERTING " + i + "\n");
+
+            try {
+                btree.insert(tuple, insertOpCtx);
+            } catch (Exception e) {
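+                // insert exceptions are ignored in this demo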
+                // e.printStackTrace();
+            }
+        }
+        // btree.printTree(leafFrame, interiorFrame);
+        // btree.printStats();
+
+        long end = System.currentTimeMillis();
+        long duration = end - start;
+        print("DURATION: " + duration + "\n");
+
+        // try a simple index scan
+
+        print("ORDERED SCAN:\n");
+        IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+        btree.search(scanCursor, nullPred, searchOpCtx);
+
+        try {
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference frameTuple = scanCursor.getTuple();
+                String rec = cmp.printTuple(frameTuple, recDescSers);
+                print(rec + "\n");
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            scanCursor.close();
+        }
+
+        // try a range search
+        print("RANGE SEARCH:\n");
+        IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+
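+        // range search over the composite (start, end) key from (12, 12) to (19, 19), both bounds inclusive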
+        // build low and high keys
+        ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+        DataOutput kdos = ktb.getDataOutput();
+
+        ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+        keyAccessor.reset(frame);
+
+        appender.reset(frame, true);
+
+        // build and append low key
+        ktb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
+        ktb.addFieldEndOffset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(12, kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // build and append high key
+        ktb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
+        ktb.addFieldEndOffset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(19, kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // create tuple references for the search keys
+        FrameTupleReference lowKey = new FrameTupleReference();
+        lowKey.reset(keyAccessor, 0);
+
+        FrameTupleReference highKey = new FrameTupleReference();
+        highKey.reset(keyAccessor, 1);
+
+        IBinaryComparator[] searchCmps = new IBinaryComparator[2];
+        searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        searchCmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+
+        // print("INDEX RANGE SEARCH ON: " + cmp.printKey(lowKey, 0) + " " +
+        // cmp.printKey(highKey, 0) + "\n");
+
+        RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+        btree.search(rangeCursor, rangePred, searchOpCtx);
+
+        try {
+            while (rangeCursor.hasNext()) {
+                rangeCursor.next();
+                ITupleReference frameTuple = rangeCursor.getTuple();
+                String rec = cmp.printTuple(frameTuple, recDescSers);
+                print(rec + "\n");
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            rangeCursor.close();
+        }
+
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+
+        print("\n");
+    }
+
+    public static String randomString(int length, Random random) {
+        String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
+        StringBuilder strBuilder = new StringBuilder();
+        for (int i = 0; i < s.length() && i < length; i++) {
+            strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
+        }
+        return strBuilder.toString();
+    }
+}
\ No newline at end of file
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
new file mode 100644
index 0000000..1651d56
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/RangeSearchCursorTest.java
@@ -0,0 +1,550 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Random;
+import java.util.TreeSet;
+
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeCursor;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.FieldPrefixNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.MetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class RangeSearchCursorTest {
+    private static final int PAGE_SIZE = 256;
+    private static final int NUM_PAGES = 10;
+    private static final int HYRACKS_FRAME_SIZE = 128;
+
+    private String tmpDir = System.getProperty("java.io.tmpdir");
+
+    public class BufferAllocator implements ICacheMemoryAllocator {
+        @Override
+        public ByteBuffer[] allocate(int pageSize, int numPages) {
+            ByteBuffer[] buffers = new ByteBuffer[numPages];
+            for (int i = 0; i < numPages; ++i) {
+                buffers[i] = ByteBuffer.allocate(pageSize);
+            }
+            return buffers;
+        }
+    }
+
+    // declare fields
+    int fieldCount = 2;
+    ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+
+    TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+    IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+    IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+    IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+    IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+    IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+    IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
+    IHyracksStageletContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+    ByteBuffer frame = ctx.allocateFrame();
+    FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+    ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+            IntegerSerializerDeserializer.INSTANCE };
+    RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+    IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+    FrameTupleReference tuple = new FrameTupleReference();
+
+    Random rnd = new Random(50);
+
+    @Before
+    public void setUp() {
+        typeTraits[0] = new TypeTrait(4);
+        typeTraits[1] = new TypeTrait(4);
+        accessor.reset(frame);
+    }
+
+    @Test
+    public void uniqueIndexTest() throws Exception {
+
+        System.out.println("TESTING RANGE SEARCH CURSOR ON UNIQUE INDEX");
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(tmpDir + "/" + "btreetest.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare keys
+        int keyFieldCount = 1;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+        BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+        btree.create(fileId, leafFrame, metaFrame);
+        btree.open(fileId);
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        DataOutput dos = tb.getDataOutput();
+
+        BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+        // generate keys
+        int numKeys = 50;
+        int maxKey = 1000;
+        TreeSet<Integer> uniqueKeys = new TreeSet<Integer>();
+        ArrayList<Integer> keys = new ArrayList<Integer>();
+        while (uniqueKeys.size() < numKeys) {
+            int key = rnd.nextInt() % maxKey;
+            uniqueKeys.add(key);
+        }
+        for (Integer i : uniqueKeys) {
+            keys.add(i);
+        }
+
+        // insert keys into btree
+        for (int i = 0; i < keys.size(); i++) {
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            try {
+                btree.insert(tuple, insertOpCtx);
+            } catch (BTreeException e) {
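+                // BTreeExceptions from insert are ignored; unexpected exceptions are reported below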
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        // btree.printTree(leafFrame, interiorFrame, recDescSers);
+
+        int minSearchKey = -100;
+        int maxSearchKey = 100;
+
+        // System.out.println("STARTING SEARCH TESTS");
+
+        // forward searches
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+
+        // backward searches
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, false, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
+
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+    }
+
+    @Test
+    public void nonUniqueIndexTest() throws Exception {
+
+        System.out.println("TESTING RANGE SEARCH CURSOR ON NONUNIQUE INDEX");
+
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(tmpDir + "/" + "btreetest.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare keys
+        int keyFieldCount = 2;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+        BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+        btree.create(fileId, leafFrame, metaFrame);
+        btree.open(fileId);
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        DataOutput dos = tb.getDataOutput();
+
+        BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+        // generate keys
+        int numKeys = 50;
+        int maxKey = 10;
+        ArrayList<Integer> keys = new ArrayList<Integer>();
+        for (int i = 0; i < numKeys; i++) {
+            int k = rnd.nextInt() % maxKey;
+            keys.add(k);
+        }
+        Collections.sort(keys);
+
+        // insert keys into btree
+        for (int i = 0; i < keys.size(); i++) {
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            try {
+                btree.insert(tuple, insertOpCtx);
+            } catch (BTreeException e) {
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        // btree.printTree(leafFrame, interiorFrame, recDescSers);
+
+        int minSearchKey = -100;
+        int maxSearchKey = 100;
+
+        // System.out.println("STARTING SEARCH TESTS");
+
+        // forward searches
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+
+        // backward searches
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, false, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
+
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+    }
+
+    @Test
+    public void nonUniqueFieldPrefixIndexTest() throws Exception {
+
+        System.out.println("TESTING RANGE SEARCH CURSOR ON NONUNIQUE FIELD-PREFIX COMPRESSED INDEX");
+
+        IBTreeLeafFrameFactory leafFrameFactory = new FieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
+        IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(tmpDir + "/" + "btreetest.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare keys
+        int keyFieldCount = 2;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+        BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+        btree.create(fileId, leafFrame, metaFrame);
+        btree.open(fileId);
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        DataOutput dos = tb.getDataOutput();
+
+        BTreeOpContext insertOpCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+        // generate keys
+        int numKeys = 50;
+        int maxKey = 10;
+        ArrayList<Integer> keys = new ArrayList<Integer>();
+        for (int i = 0; i < numKeys; i++) {
+            int k = rnd.nextInt() % maxKey;
+            keys.add(k);
+        }
+        Collections.sort(keys);
+
+        // insert keys into btree
+        for (int i = 0; i < keys.size(); i++) {
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(keys.get(i).intValue(), dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(i, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            try {
+                btree.insert(tuple, insertOpCtx);
+            } catch (BTreeException e) {
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        // btree.printTree(leafFrame, interiorFrame, recDescSers);
+
+        int minSearchKey = -100;
+        int maxSearchKey = 100;
+
+        // System.out.println("STARTING SEARCH TESTS");
+
+        // forward searches
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, false, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, false, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, true, true, true, false);
+
+        // backward searches
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, false, true, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, false, false);
+        performSearches(keys, btree, leafFrame, interiorFrame, minSearchKey, maxSearchKey, false, true, true, false);
+
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+    }
+
+    public RangePredicate createRangePredicate(int lk, int hk, boolean isForward, boolean lowKeyInclusive,
+            boolean highKeyInclusive, MultiComparator cmp, ITypeTrait[] typeTraits) throws HyracksDataException {
+        // build low and high keys
+        ArrayTupleBuilder ktb = new ArrayTupleBuilder(1);
+        DataOutput kdos = ktb.getDataOutput();
+
+        ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+        IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+        keyAccessor.reset(frame);
+
+        appender.reset(frame, true);
+
+        // build and append low key
+        ktb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(lk, kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // build and append high key
+        ktb.reset();
+        IntegerSerializerDeserializer.INSTANCE.serialize(hk, kdos);
+        ktb.addFieldEndOffset();
+        appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+        // create tuple references for the search keys
+        FrameTupleReference lowKey = new FrameTupleReference();
+        lowKey.reset(keyAccessor, 0);
+
+        FrameTupleReference highKey = new FrameTupleReference();
+        highKey.reset(keyAccessor, 1);
+
+        IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+        searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+
+        RangePredicate rangePred = new RangePredicate(isForward, lowKey, highKey, lowKeyInclusive, highKeyInclusive,
+                searchCmp, searchCmp);
+        return rangePred;
+    }
+
+    public void getExpectedResults(ArrayList<Integer> expectedResults, ArrayList<Integer> keys, int lk, int hk,
+            boolean isForward, boolean lowKeyInclusive, boolean highKeyInclusive) {
+
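+        // collect all keys in [lk, hk], honoring the inclusiveness flags; iterate in reverse for backward searches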
+        // special cases
+        if (lk == hk && (!lowKeyInclusive || !highKeyInclusive))
+            return;
+        if (lk > hk)
+            return;
+
+        if (isForward) {
+            for (int i = 0; i < keys.size(); i++) {
+                if ((lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive)) {
+                    expectedResults.add(keys.get(i));
+                    continue;
+                }
+
+                if (lk < keys.get(i) && hk > keys.get(i)) {
+                    expectedResults.add(keys.get(i));
+                    continue;
+                }
+            }
+        } else {
+            for (int i = keys.size() - 1; i >= 0; i--) {
+                if ((lk == keys.get(i) && lowKeyInclusive) || (hk == keys.get(i) && highKeyInclusive)) {
+                    expectedResults.add(keys.get(i));
+                    continue;
+                }
+
+                if (lk < keys.get(i) && hk > keys.get(i)) {
+                    expectedResults.add(keys.get(i));
+                    continue;
+                }
+            }
+        }
+    }
+
+    public boolean performSearches(ArrayList<Integer> keys, BTree btree, IBTreeLeafFrame leafFrame,
+            IBTreeInteriorFrame interiorFrame, int minKey, int maxKey, boolean isForward, boolean lowKeyInclusive,
+            boolean highKeyInclusive, boolean printExpectedResults) throws Exception {
+
+        ArrayList<Integer> results = new ArrayList<Integer>();
+        ArrayList<Integer> expectedResults = new ArrayList<Integer>();
+
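+        // exhaustively try every (lowKey, highKey) pair in [minKey, maxKey) and compare the cursor output against the expected results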
+        for (int i = minKey; i < maxKey; i++) {
+            for (int j = minKey; j < maxKey; j++) {
+
+                // if(i != -100 || j != 1) continue;
+
+                results.clear();
+                expectedResults.clear();
+
+                int lowKey = i;
+                int highKey = j;
+
+                IBTreeCursor rangeCursor = new RangeSearchCursor(leafFrame);
+                RangePredicate rangePred = createRangePredicate(lowKey, highKey, isForward, lowKeyInclusive,
+                        highKeyInclusive, btree.getMultiComparator(), btree.getMultiComparator().getTypeTraits());
+                BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, null);
+                btree.search(rangeCursor, rangePred, searchOpCtx);
+
+                try {
+                    while (rangeCursor.hasNext()) {
+                        rangeCursor.next();
+                        ITupleReference frameTuple = rangeCursor.getTuple();
+                        ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(0),
+                                frameTuple.getFieldStart(0), frameTuple.getFieldLength(0));
+                        DataInput dataIn = new DataInputStream(inStream);
+                        Integer res = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
+                        results.add(res);
+                    }
+                } catch (Exception e) {
+                    e.printStackTrace();
+                } finally {
+                    rangeCursor.close();
+                }
+
+                getExpectedResults(expectedResults, keys, lowKey, highKey, isForward, lowKeyInclusive, highKeyInclusive);
+
+                if (printExpectedResults) {
+                    if (expectedResults.size() > 0) {
+                        char l, u;
+
+                        if (lowKeyInclusive)
+                            l = '[';
+                        else
+                            l = '(';
+
+                        if (highKeyInclusive)
+                            u = ']';
+                        else
+                            u = ')';
+
+                        System.out.println("RANGE: " + l + " " + lowKey + " , " + highKey + " " + u);
+                        for (Integer r : expectedResults)
+                            System.out.print(r + " ");
+                        System.out.println();
+                    }
+                }
+
+                if (results.size() == expectedResults.size()) {
+                    for (int k = 0; k < results.size(); k++) {
+                        if (!results.get(k).equals(expectedResults.get(k))) {
+                            System.out.println("DIFFERENT RESULTS AT: i=" + i + " j=" + j + " k=" + k);
+                            System.out.println(results.get(k) + " " + expectedResults.get(k));
+                            return false;
+                        }
+                    }
+                } else {
+                    System.out.println("UNEQUAL NUMBER OF RESULTS AT: i=" + i + " j=" + j);
+                    System.out.println("RESULTS: " + results.size());
+                    System.out.println("EXPECTED RESULTS: " + expectedResults.size());
+                    return false;
+                }
+            }
+        }
+
+        return true;
+    }
+
+    @After
+    public void tearDown() {
+
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
new file mode 100644
index 0000000..28c360c
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-btree-test/src/test/java/edu/uci/ics/hyracks/storage/am/btree/StorageManagerTest.java
@@ -0,0 +1,260 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.btree;
+
+import java.io.File;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.Random;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.storage.common.sync.LatchType;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class StorageManagerTest {
+    private static final int PAGE_SIZE = 256;
+    private static final int NUM_PAGES = 10;
+
+    private String tmpDir = System.getProperty("java.io.tmpdir");
+
+    public class PinnedLatchedPage {
+        public final ICachedPage page;
+        public final LatchType latch;
+        public final int pageId;
+
+        public PinnedLatchedPage(ICachedPage page, int pageId, LatchType latch) {
+            this.page = page;
+            this.pageId = pageId;
+            this.latch = latch;
+        }
+    }
+
+    public enum FileAccessType {
+        FTA_READONLY,
+        FTA_WRITEONLY,
+        FTA_MIXED,
+        FTA_UNLATCHED
+    }
+
+    public class FileAccessWorker implements Runnable {
+        private int workerId;
+        private final IBufferCache bufferCache;
+        private final int maxPages;
+        private final int fileId;
+        private final long thinkTime;
+        private final int maxLoopCount;
+        private final int maxPinnedPages;
+        private final int closeFileChance;
+        private final FileAccessType fta;
+        private int loopCount = 0;
+        private boolean fileIsOpen = false;
+        private Random rnd = new Random(50);
+        private List<PinnedLatchedPage> pinnedPages = new LinkedList<PinnedLatchedPage>();
+
+        public FileAccessWorker(int workerId, IBufferCache bufferCache, FileAccessType fta, int fileId, int maxPages,
+                int maxPinnedPages, int maxLoopCount, int closeFileChance, long thinkTime) {
+            this.bufferCache = bufferCache;
+            this.fileId = fileId;
+            this.maxPages = maxPages;
+            this.maxLoopCount = maxLoopCount;
+            this.maxPinnedPages = maxPinnedPages;
+            this.thinkTime = thinkTime;
+            this.closeFileChance = closeFileChance;
+            this.workerId = workerId;
+            this.fta = fta;
+        }
+
+        private void pinRandomPage() {
+            int pageId = Math.abs(rnd.nextInt() % maxPages);
+
+            System.out.println(workerId + " PINNING PAGE: " + pageId);
+
+            try {
+                ICachedPage page = bufferCache.pin(BufferedFileHandle.getDiskPageId(fileId, pageId), false);
+                LatchType latch = null;
+
+                switch (fta) {
+
+                    case FTA_UNLATCHED: {
+                        latch = null;
+                    }
+                        break;
+
+                    case FTA_READONLY: {
+                        System.out.println(workerId + " S LATCHING: " + pageId);
+                        page.acquireReadLatch();
+                        latch = LatchType.LATCH_S;
+                    }
+                        break;
+
+                    case FTA_WRITEONLY: {
+                        System.out.println(workerId + " X LATCHING: " + pageId);
+                        page.acquireWriteLatch();
+                        latch = LatchType.LATCH_X;
+                    }
+                        break;
+
+                    case FTA_MIXED: {
+                        if (rnd.nextInt() % 2 == 0) {
+                            System.out.println(workerId + " S LATCHING: " + pageId);
+                            page.acquireReadLatch();
+                            latch = LatchType.LATCH_S;
+                        } else {
+                            System.out.println(workerId + " X LATCHING: " + pageId);
+                            page.acquireWriteLatch();
+                            latch = LatchType.LATCH_X;
+                        }
+                    }
+                        break;
+
+                }
+
+                PinnedLatchedPage plPage = new PinnedLatchedPage(page, pageId, latch);
+                pinnedPages.add(plPage);
+            } catch (HyracksDataException e) {
+                e.printStackTrace();
+            }
+        }
+
+        private void unpinRandomPage() {
+            int index = Math.abs(rnd.nextInt() % pinnedPages.size());
+            try {
+                PinnedLatchedPage plPage = pinnedPages.get(index);
+
+                if (plPage.latch != null) {
+                    if (plPage.latch == LatchType.LATCH_S) {
+                        System.out.println(workerId + " S UNLATCHING: " + plPage.pageId);
+                        plPage.page.releaseReadLatch();
+                    } else {
+                        System.out.println(workerId + " X UNLATCHING: " + plPage.pageId);
+                        plPage.page.releaseWriteLatch();
+                    }
+                }
+                System.out.println(workerId + " UNPINNING PAGE: " + plPage.pageId);
+
+                bufferCache.unpin(plPage.page);
+                pinnedPages.remove(index);
+            } catch (HyracksDataException e) {
+                e.printStackTrace();
+            }
+        }
+
+        private void openFile() {
+            System.out.println(workerId + " OPENING FILE: " + fileId);
+            try {
+                bufferCache.openFile(fileId);
+                fileIsOpen = true;
+            } catch (HyracksDataException e) {
+                e.printStackTrace();
+            }
+        }
+
+        private void closeFile() {
+            System.out.println(workerId + " CLOSING FILE: " + fileId);
+            try {
+                bufferCache.closeFile(fileId);
+                fileIsOpen = false;
+            } catch (HyracksDataException e) {
+                e.printStackTrace();
+            }
+        }
+
+        @Override
+        public void run() {
+
+            openFile();
+
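+            // each loop iteration pins some random pages, sleeps for thinkTime, unpins some pages, and occasionally closes the file; a closed file is reopened on the next iteration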
+            while (loopCount < maxLoopCount) {
+                loopCount++;
+
+                System.out.println(workerId + " LOOP: " + loopCount + "/" + maxLoopCount);
+
+                if (fileIsOpen) {
+
+                    // pin some pages
+                    int pagesToPin = Math.abs(rnd.nextInt()) % (maxPinnedPages - pinnedPages.size());
+                    for (int i = 0; i < pagesToPin; i++) {
+                        pinRandomPage();
+                    }
+
+                    // do some thinking
+                    try {
+                        Thread.sleep(thinkTime);
+                    } catch (InterruptedException e) {
+                        e.printStackTrace();
+                    }
+
+                    // unpin some pages
+                    if (!pinnedPages.isEmpty()) {
+                        int pagesToUnpin = Math.abs(rnd.nextInt()) % pinnedPages.size();
+                        for (int i = 0; i < pagesToUnpin; i++) {
+                            unpinRandomPage();
+                        }
+                    }
+
+                    // possibly close file
+                    int closeFileCheck = Math.abs(rnd.nextInt()) % closeFileChance;
+                    if (pinnedPages.isEmpty() || closeFileCheck == 0) {
+                        int numPinnedPages = pinnedPages.size();
+                        for (int i = 0; i < numPinnedPages; i++) {
+                            unpinRandomPage();
+                        }
+                        closeFile();
+                    }
+                } else {
+                    openFile();
+                }
+            }
+
+            if (fileIsOpen) {
+                int numPinnedPages = pinnedPages.size();
+                for (int i = 0; i < numPinnedPages; i++) {
+                    unpinRandomPage();
+                }
+                closeFile();
+            }
+        }
+    }
+
+    @Test
+    public void oneThreadOneFileTest() throws Exception {
+        IHyracksStageletContext ctx = TestUtils.create(32768);
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(tmpDir + "/" + "testfile01.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+
+        Thread worker = new Thread(new FileAccessWorker(0, bufferCache, FileAccessType.FTA_UNLATCHED, fileId, 10, 10,
+                100, 10, 0));
+
+        worker.start();
+
+        worker.join();
+
+        bufferCache.close();
+    }
+}
\ No newline at end of file
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/.classpath b/hyracks-tests/hyracks-storage-am-invertedindex-test/.classpath
new file mode 100644
index 0000000..e44aa2f
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/.classpath
@@ -0,0 +1,7 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<classpath>
+	<classpathentry kind="src" path="src/test/java"/>
+	<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/JavaSE-1.6"/>
+	<classpathentry kind="con" path="org.maven.ide.eclipse.MAVEN2_CLASSPATH_CONTAINER"/>
+	<classpathentry kind="output" path="target/classes"/>
+</classpath>
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/.project b/hyracks-tests/hyracks-storage-am-invertedindex-test/.project
new file mode 100644
index 0000000..f60b2f9
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/.project
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<projectDescription>
+	<name>hyracks-storage-am-invertedindex-test</name>
+	<comment></comment>
+	<projects>
+	</projects>
+	<buildSpec>
+		<buildCommand>
+			<name>org.eclipse.jdt.core.javabuilder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+		<buildCommand>
+			<name>org.maven.ide.eclipse.maven2Builder</name>
+			<arguments>
+			</arguments>
+		</buildCommand>
+	</buildSpec>
+	<natures>
+		<nature>org.eclipse.jdt.core.javanature</nature>
+		<nature>org.maven.ide.eclipse.maven2Nature</nature>
+	</natures>
+</projectDescription>
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.eclipse.jdt.core.prefs b/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.eclipse.jdt.core.prefs
new file mode 100644
index 0000000..3cd389e
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.eclipse.jdt.core.prefs
@@ -0,0 +1,6 @@
+#Thu Jan 06 11:27:16 PST 2011
+eclipse.preferences.version=1
+org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.6
+org.eclipse.jdt.core.compiler.compliance=1.6
+org.eclipse.jdt.core.compiler.problem.forbiddenReference=warning
+org.eclipse.jdt.core.compiler.source=1.6
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.maven.ide.eclipse.prefs b/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.maven.ide.eclipse.prefs
new file mode 100644
index 0000000..99b89a6
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/.settings/org.maven.ide.eclipse.prefs
@@ -0,0 +1,9 @@
+#Thu Jan 06 11:27:16 PST 2011
+activeProfiles=
+eclipse.preferences.version=1
+fullBuildGoals=process-test-resources
+includeModules=false
+resolveWorkspaceProjects=true
+resourceFilterGoals=process-resources resources\:testResources
+skipCompilerPlugin=true
+version=1
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml b/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
new file mode 100644
index 0000000..b3c62ae
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/pom.xml
@@ -0,0 +1,55 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-storage-am-invertedindex-test</artifactId>
+  <version>0.1.4-SNAPSHOT</version>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks-tests</artifactId>
+    <version>0.1.4-SNAPSHOT</version>
+  </parent>
+
+  <build>
+    <plugins>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.0.2</version>
+        <configuration>
+          <source>1.6</source>
+          <target>1.6</target>
+        </configuration>
+      </plugin>
+    </plugins>
+  </build>
+  <dependencies>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-control-nc</artifactId>
+  		<version>0.1.4-SNAPSHOT</version>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-storage-am-invertedindex</artifactId>
+  		<version>0.1.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>compile</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>edu.uci.ics.hyracks</groupId>
+  		<artifactId>hyracks-test-support</artifactId>
+  		<version>0.1.4-SNAPSHOT</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  	<dependency>
+  		<groupId>junit</groupId>
+  		<artifactId>junit</artifactId>
+  		<version>4.8.1</version>
+  		<type>jar</type>
+  		<scope>test</scope>
+  	</dependency>
+  </dependencies>
+</project>
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/SimpleConjunctiveSearcherTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/SimpleConjunctiveSearcherTest.java
new file mode 100644
index 0000000..0e53d3f
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/searchers/SimpleConjunctiveSearcherTest.java
@@ -0,0 +1,297 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License from
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.searchers;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+import java.util.UUID;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.application.INCApplicationContext;
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksJobletContext;
+import edu.uci.ics.hyracks.api.context.IHyracksRootContext;
+import edu.uci.ics.hyracks.api.context.IHyracksStageletContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTrait;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.TypeTrait;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.MetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.btree.impls.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.btree.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.am.invertedindex.api.IInvertedIndexResultCursor;
+import edu.uci.ics.hyracks.storage.am.invertedindex.impls.SimpleConjunctiveSearcher;
+import edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers.DelimitedUTF8StringBinaryTokenizer;
+import edu.uci.ics.hyracks.storage.common.IStorageManagerInterface;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestJobletContext;
+import edu.uci.ics.hyracks.test.support.TestNCApplicationContext;
+import edu.uci.ics.hyracks.test.support.TestRootContext;
+import edu.uci.ics.hyracks.test.support.TestStageletContext;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerInterface;
+
+public class SimpleConjunctiveSearcherTest {
+    // testing params
+    // private static final int PAGE_SIZE = 256;
+    // private static final int NUM_PAGES = 10;
+    // private static final int HYRACKS_FRAME_SIZE = 256;
+
+    // realistic params
+    // private static final int PAGE_SIZE = 65536;
+    private static final int PAGE_SIZE = 32768;
+    private static final int NUM_PAGES = 10;
+    private static final int HYRACKS_FRAME_SIZE = 32768;
+
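+    // Configure the shared test buffer cache (page size and number of pages) once,
+    // before any test context is created.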
+    static {
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES);
+    }
+
+    private String tmpDir = System.getProperty("java.io.tmpdir");
+
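+    // Backs the buffer cache with plain on-heap ByteBuffers.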
+    public class BufferAllocator implements ICacheMemoryAllocator {
+        @Override
+        public ByteBuffer[] allocate(int pageSize, int numPages) {
+            ByteBuffer[] buffers = new ByteBuffer[numPages];
+            for (int i = 0; i < numPages; ++i) {
+                buffers[i] = ByteBuffer.allocate(pageSize);
+            }
+            return buffers;
+        }
+    }
+
+    @Test
+    public void test01() throws Exception {
+        IHyracksRootContext rootCtx = new TestRootContext(HYRACKS_FRAME_SIZE);
+        INCApplicationContext appCtx = new TestNCApplicationContext(rootCtx);
+        IHyracksJobletContext jobletCtx = new TestJobletContext(appCtx, UUID.randomUUID(), 0);
+        IHyracksStageletContext stageletCtx = new TestStageletContext(jobletCtx, UUID.randomUUID());
+
+        IStorageManagerInterface smi = new TestStorageManagerInterface();
+
+        IBufferCache bufferCache = smi.getBufferCache(stageletCtx);
+        IFileMapProvider fmp = smi.getFileMapProvider(stageletCtx);
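+        // Register the backing file with the buffer cache and open it for paged access.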
+        FileReference file = new FileReference(new File(tmpDir + "/" + "btreetest.bin"));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // declare fields
+        int fieldCount = 2;
+        ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+        typeTraits[0] = new TypeTrait(ITypeTrait.VARIABLE_LENGTH);
+        typeTraits[1] = new TypeTrait(4);
+
+        // declare keys
+        int keyFieldCount = 2;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+        cmps[1] = IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+
+        MultiComparator cmp = new MultiComparator(typeTraits, cmps);
+
+        TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        // SimpleTupleWriterFactory tupleWriterFactory = new SimpleTupleWriterFactory();
+        IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory(tupleWriterFactory);
+        // IBTreeLeafFrameFactory leafFrameFactory = new FieldPrefixNSMLeafFrameFactory(tupleWriterFactory);
+        IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory(tupleWriterFactory);
+        IBTreeMetaDataFrameFactory metaFrameFactory = new MetaDataFrameFactory();
+
+        IBTreeLeafFrame leafFrame = leafFrameFactory.getFrame();
+        IBTreeInteriorFrame interiorFrame = interiorFrameFactory.getFrame();
+        IBTreeMetaDataFrame metaFrame = metaFrameFactory.getFrame();
+
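+        // The inverted index is backed by a BTree over (token, document-id) pairs.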
+        BTree btree = new BTree(bufferCache, interiorFrameFactory, leafFrameFactory, cmp);
+        btree.create(fileId, leafFrame, metaFrame);
+        btree.open(fileId);
+
+        Random rnd = new Random();
+        rnd.setSeed(50);
+
+        ByteBuffer frame = stageletCtx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(stageletCtx.getFrameSize());
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(cmp.getFieldCount());
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] btreeSerde = { UTF8StringSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor btreeRecDesc = new RecordDescriptor(btreeSerde);
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(stageletCtx.getFrameSize(), btreeRecDesc);
+        accessor.reset(frame);
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        List<String> tokens = new ArrayList<String>();
+        tokens.add("computer");
+        tokens.add("hyracks");
+        tokens.add("fast");
+        tokens.add("university");
+        tokens.add("science");
+        tokens.add("major");
+
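+        // addProb grows by addProbStep for each token, so a token is paired with a
+        // given document id with probability 1/addProb; earlier tokens therefore end
+        // up with longer inverted lists than later ones.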
+        int maxId = 10000;
+        int addProb = 0;
+        int addProbStep = 2;
+
+        BTreeOpContext opCtx = btree.createOpContext(BTreeOp.BTO_INSERT, leafFrame, interiorFrame, metaFrame);
+
+        for (int i = 0; i < tokens.size(); i++) {
+
+            addProb += addProbStep;
+            for (int j = 0; j < maxId; j++) {
+                if ((Math.abs(rnd.nextInt()) % addProb) == 0) {
+                    tb.reset();
+                    UTF8StringSerializerDeserializer.INSTANCE.serialize(tokens.get(i), dos);
+                    tb.addFieldEndOffset();
+                    IntegerSerializerDeserializer.INSTANCE.serialize(j, dos);
+                    tb.addFieldEndOffset();
+
+                    appender.reset(frame, true);
+                    appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+                    tuple.reset(accessor, 0);
+
+                    try {
+                        btree.insert(tuple, opCtx);
+                    } catch (Exception e) {
+                        e.printStackTrace();
+                    }
+                }
+            }
+        }
+
+        int numPages = btree.getMaxPage(metaFrame);
+        System.out.println("NUMPAGES: " + numPages);
+
+        // build query as tuple reference
+        ISerializerDeserializer[] querySerde = { UTF8StringSerializerDeserializer.INSTANCE };
+        RecordDescriptor queryRecDesc = new RecordDescriptor(querySerde);
+
+        FrameTupleAppender queryAppender = new FrameTupleAppender(stageletCtx.getFrameSize());
+        ArrayTupleBuilder queryTb = new ArrayTupleBuilder(querySerde.length);
+        DataOutput queryDos = queryTb.getDataOutput();
+
+        IFrameTupleAccessor queryAccessor = new FrameTupleAccessor(stageletCtx.getFrameSize(), queryRecDesc);
+        queryAccessor.reset(frame);
+        FrameTupleReference queryTuple = new FrameTupleReference();
+
+        String query = "computer hyracks fast";
+        char queryDelimiter = ' ';
+        IBinaryTokenizer queryTokenizer = new DelimitedUTF8StringBinaryTokenizer(queryDelimiter);
+
+        queryTb.reset();
+        UTF8StringSerializerDeserializer.INSTANCE.serialize(query, queryDos);
+        queryTb.addFieldEndOffset();
+
+        queryAppender.reset(frame, true);
+        queryAppender.append(queryTb.getFieldEndOffsets(), queryTb.getByteArray(), 0, queryTb.getSize());
+        queryTuple.reset(queryAccessor, 0);
+
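+        // Only the value fields (here the integer document id) appear in the result
+        // records; the token key field is not returned.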
+        int numKeyFields = 1;
+        int numValueFields = 1;
+        ISerializerDeserializer[] resultSerde = new ISerializerDeserializer[numValueFields];
+        for (int i = 0; i < numValueFields; i++) {
+            resultSerde[i] = btreeSerde[numKeyFields + i];
+        }
+        RecordDescriptor resultRecDesc = new RecordDescriptor(resultSerde);
+        FrameTupleAccessor resultAccessor = new FrameTupleAccessor(stageletCtx.getFrameSize(), resultRecDesc);
+        FrameTupleReference resultTuple = new FrameTupleReference();
+
+        SimpleConjunctiveSearcher searcher = new SimpleConjunctiveSearcher(stageletCtx, btree, btreeRecDesc,
+                queryTokenizer, numKeyFields, numValueFields);
+
+        long timeStart = System.currentTimeMillis();
+        searcher.search(queryTuple, 0);
+        long timeEnd = System.currentTimeMillis();
+        System.out.println("SEARCH TIME: " + (timeEnd - timeStart) + "ms");
+
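+        // Iterate over the result frames and print each matching document id.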
+        // System.out.println("INTERSECTION RESULTS");
+        IInvertedIndexResultCursor resultCursor = searcher.getResultCursor();
+        while (resultCursor.hasNext()) {
+            resultCursor.next();
+            resultAccessor.reset(resultCursor.getBuffer());
+            for (int i = 0; i < resultAccessor.getTupleCount(); i++) {
+                resultTuple.reset(resultAccessor, i);
+                for (int j = 0; j < resultTuple.getFieldCount(); j++) {
+                    ByteArrayInputStream inStream = new ByteArrayInputStream(resultTuple.getFieldData(j),
+                            resultTuple.getFieldStart(j), resultTuple.getFieldLength(j));
+                    DataInput dataIn = new DataInputStream(inStream);
+                    Object o = resultSerde[j].deserialize(dataIn);
+                    System.out.print(o + " ");
+                }
+                System.out.println();
+            }
+        }
+
+        /*
+         * IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+         * searchCmps[0] = UTF8StringBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+         * MultiComparator searchCmp = new MultiComparator(typeTraits, searchCmps);
+         *
+         * // ordered scan
+         * IBTreeCursor scanCursor = new RangeSearchCursor(leafFrame);
+         * RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null);
+         * BTreeOpContext searchOpCtx = btree.createOpContext(BTreeOp.BTO_SEARCH, leafFrame, interiorFrame, metaFrame);
+         * btree.search(scanCursor, nullPred, searchOpCtx);
+         *
+         * try {
+         *     while (scanCursor.hasNext()) {
+         *         scanCursor.next();
+         *         ITupleReference frameTuple = scanCursor.getTuple();
+         *         String rec = cmp.printTuple(frameTuple, btreeSerde);
+         *         System.out.println(rec);
+         *     }
+         * } catch (Exception e) {
+         *     e.printStackTrace();
+         * } finally {
+         *     scanCursor.close();
+         * }
+         */
+
+        btree.close();
+        bufferCache.closeFile(fileId);
+        bufferCache.close();
+    }
+}
diff --git a/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/TokenizerTest.java b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/TokenizerTest.java
new file mode 100644
index 0000000..47c75cf
--- /dev/null
+++ b/hyracks-tests/hyracks-storage-am-invertedindex-test/src/test/java/edu/uci/ics/hyracks/storage/am/invertedindex/tokenizers/TokenizerTest.java
@@ -0,0 +1,194 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package edu.uci.ics.hyracks.storage.am.invertedindex.tokenizers;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.util.ArrayList;
+import java.util.Random;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunction;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ByteArrayAccessibleOutputStream;
+import edu.uci.ics.hyracks.dataflow.common.data.hash.UTF8StringBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+
+public class TokenizerTest {
+
+    // testing DelimitedUTF8StringBinaryTokenizer
+    @Test
+    public void test01() throws Exception {
+        Random rnd = new Random(50);
+
+        int numDocs = 100;
+        int maxWords = 1000;
+        int maxWordLength = 50;
+        char delimiter = ' ';
+
+        DelimitedUTF8StringBinaryTokenizer tok = new DelimitedUTF8StringBinaryTokenizer(delimiter);
+
+        // create a bunch of documents
+        for (int i = 0; i < numDocs; i++) {
+
+            // create a single document with a bunch of words
+            int words = (Math.abs(rnd.nextInt()) % maxWords) + 1;
+            StringBuilder strBuilder = new StringBuilder();
+            for (int j = 0; j < words; j++) {
+                int len = (Math.abs(rnd.nextInt()) % maxWordLength) + 1;
+                String s = randomString(len, rnd);
+                strBuilder.append(s);
+                if (j < words - 1)
+                    strBuilder.append(delimiter);
+            }
+
+            String doc = strBuilder.toString();
+
+            // serialize document into baaos
+            ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+            DataOutputStream dos = new DataOutputStream(baaos);
+            UTF8StringSerializerDeserializer.INSTANCE.serialize(doc, dos);
+            byte[] data = baaos.toByteArray();
+
+            // use binary tokenizer and compare with Java tokenizer
+            String[] cmpTokens = doc.split(new String(new char[] { delimiter }));
+            int cmpCounter = 0;
+
+            tok.reset(data, 0, data.length);
+            while (tok.hasNext()) {
+                tok.next();
+
+                // write token to outputstream
+                ByteArrayAccessibleOutputStream baaosWrite = new ByteArrayAccessibleOutputStream();
+                DataOutputStream dosWrite = new DataOutputStream(baaosWrite);
+                tok.writeToken(dosWrite);
+
+                // deserialize token to get string object
+                ByteArrayInputStream inStream = new ByteArrayInputStream(baaosWrite.toByteArray());
+                DataInput dataIn = new DataInputStream(inStream);
+                String s = UTF8StringSerializerDeserializer.INSTANCE.deserialize(dataIn);
+
+                Assert.assertEquals(cmpTokens[cmpCounter++], s);
+            }
+        }
+    }
+
+    // testing HashedQGramUTF8StringBinaryTokenizer
+    @Test
+    public void test02() throws Exception {
+        Random rnd = new Random(50);
+
+        int numStrings = 1000;
+        int maxStrLen = 100;
+        int minQ = 2;
+        int maxQ = 10;
+
+        // we test the correctness of HashedQGramUTF8StringBinaryTokenizer as
+        // follows:
+        // 1.1. tokenize the string into q-gram strings
+        // 1.2. serialize q-gram strings into bytes
+        // 1.3. compute hashed gram with UTF8StringBinaryHashFunctionFactory
+        // 2.1. serialize string into bytes
+        // 2.2. tokenize serialized string into hashed q-grams
+        // 2.3. test whether hashed grams from 1.3. and 2.3. are equal
+        for (int i = 0; i < numStrings; i++) {
+            int q = (Math.abs(rnd.nextInt()) % (maxQ - minQ)) + minQ;
+            int strLen = (Math.abs(rnd.nextInt()) % (maxStrLen - q)) + q;
+            String str = randomString(strLen, rnd);
+
+            // randomly choose pre and postfixing
+            boolean prePost = false;
+            if (Math.abs(rnd.nextInt()) % 2 == 0)
+                prePost = true;
+
+            HashedQGramUTF8StringBinaryTokenizer qgramTok = new HashedQGramUTF8StringBinaryTokenizer(q, prePost);
+
+            String extendedString = str;
+            if (prePost) {
+                // pre and postfix string
+                StringBuilder strBuilder = new StringBuilder();
+                for (int j = 0; j < q - 1; j++)
+                    strBuilder.append(qgramTok.getPreChar());
+                strBuilder.append(str);
+                for (int j = 0; j < q - 1; j++)
+                    strBuilder.append(qgramTok.getPostChar());
+                extendedString = strBuilder.toString();
+            }
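+            // For illustration only (the actual pre/post characters come from the
+            // tokenizer): with q = 3 and pre/postfixing enabled, "abc" would yield the
+            // grams "##a", "#ab", "abc", "bc$", "c$$" assuming '#'/'$' markers.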
+
+            // generate q-grams in deserialized form
+            ArrayList<String> javaGrams = new ArrayList<String>();
+            for (int j = 0; j < extendedString.length() - q + 1; j++) {
+                javaGrams.add(extendedString.substring(j, j + q));
+            }
+
+            // serialize string for use in binary gram tokenizer
+            ByteArrayAccessibleOutputStream baaos = new ByteArrayAccessibleOutputStream();
+            DataOutputStream dos = new DataOutputStream(baaos);
+            UTF8StringSerializerDeserializer.INSTANCE.serialize(str, dos);
+            byte[] data = baaos.toByteArray();
+
+            qgramTok.reset(data, 0, data.length);
+
+            int counter = 0;
+            while (qgramTok.hasNext()) {
+                qgramTok.next();
+
+                // write token to outputstream
+                ByteArrayAccessibleOutputStream baaosWrite = new ByteArrayAccessibleOutputStream();
+                DataOutputStream dosWrite = new DataOutputStream(baaosWrite);
+                qgramTok.writeToken(dosWrite);
+
+                // deserialize token to get hashed gram
+                ByteArrayInputStream inStream = new ByteArrayInputStream(baaosWrite.toByteArray());
+                DataInput dataIn = new DataInputStream(inStream);
+                Integer binHashedGram = IntegerSerializerDeserializer.INSTANCE.deserialize(dataIn);
+
+                // create hashed gram to test against
+                ByteArrayAccessibleOutputStream baaosCmp = new ByteArrayAccessibleOutputStream();
+                DataOutputStream dosCmp = new DataOutputStream(baaosCmp);
+                UTF8StringSerializerDeserializer.INSTANCE.serialize(javaGrams.get(counter), dosCmp);
+
+                IBinaryHashFunction strHasher = UTF8StringBinaryHashFunctionFactory.INSTANCE.createBinaryHashFunction();
+                byte[] cmpData = baaosCmp.toByteArray();
+                int cmpHash = strHasher.hash(cmpData, 0, cmpData.length);
+
+                Assert.assertEquals(cmpHash, binHashedGram.intValue());
+
+                counter++;
+            }
+        }
+    }
+
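+    // Builds a random string of at most 'length' characters drawn from a random
+    // hex string, retrying a bounded number of times and falling back to "abc".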
+    public static String randomString(int length, Random random) {
+        int maxAttempts = 1000;
+        int count = 0;
+        while (count < maxAttempts) {
+            String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
+            StringBuilder strBuilder = new StringBuilder();
+            for (int i = 0; i < s.length() && i < length; i++) {
+                strBuilder.append(s.charAt(Math.abs(random.nextInt()) % s.length()));
+            }
+            if (strBuilder.length() > 0)
+                return strBuilder.toString();
+            count++;
+        }
+        return "abc";
+    }
+}
diff --git a/hyracks-tests/pom.xml b/hyracks-tests/pom.xml
new file mode 100644
index 0000000..47e53ae
--- /dev/null
+++ b/hyracks-tests/pom.xml
@@ -0,0 +1,18 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+  <modelVersion>4.0.0</modelVersion>
+  <groupId>edu.uci.ics.hyracks</groupId>
+  <artifactId>hyracks-tests</artifactId>
+  <version>0.1.4-SNAPSHOT</version>
+  <packaging>pom</packaging>
+
+  <parent>
+    <groupId>edu.uci.ics.hyracks</groupId>
+    <artifactId>hyracks</artifactId>
+    <version>0.1.4-SNAPSHOT</version>
+  </parent>
+
+  <modules>
+    <module>hyracks-storage-am-btree-test</module>
+    <module>hyracks-storage-am-invertedindex-test</module>
+  </modules>
+</project>