Moved LSM-Tree code from grape into this branch. Modified code to make it compile (ported to new hyracks version).
git-svn-id: https://hyracks.googlecode.com/svn/branches/hyracks_lsm_tree@878 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java
index 87d9b35..00575f4 100644
--- a/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java
+++ b/hyracks-dataflow-common/src/main/java/edu/uci/ics/hyracks/dataflow/common/util/SerdeUtils.java
@@ -35,13 +35,41 @@
@SuppressWarnings("rawtypes")
public class SerdeUtils {
- public static ITypeTraits[] serdesToTypeTraits(ISerializerDeserializer[] serdes, int numSerdes) {
- ITypeTraits[] typeTraits = new ITypeTraits[numSerdes];
- for (int i = 0; i < numSerdes; i++) {
+ public static class PayloadTypeTraits implements ITypeTraits {
+ private static final long serialVersionUID = 1L;
+ final int payloadSize;
+
+ public PayloadTypeTraits(int payloadSize) {
+ this.payloadSize = payloadSize;
+ }
+
+ @Override
+ public boolean isFixedLength() {
+ return true;
+ }
+
+ @Override
+ public int getFixedLength() {
+ return payloadSize;
+ }
+ }
+
+ public static ITypeTraits[] serdesToTypeTraits(ISerializerDeserializer[] serdes) {
+ ITypeTraits[] typeTraits = new ITypeTraits[serdes.length];
+ for (int i = 0; i < serdes.length; i++) {
typeTraits[i] = serdeToTypeTrait(serdes[i]);
}
return typeTraits;
}
+
+ public static ITypeTraits[] serdesToTypeTraits(ISerializerDeserializer[] serdes, int payloadSize) {
+ ITypeTraits[] typeTraits = new ITypeTraits[serdes.length + 1];
+ for (int i = 0; i < serdes.length; i++) {
+ typeTraits[i] = serdeToTypeTrait(serdes[i]);
+ }
+ typeTraits[serdes.length] = new PayloadTypeTraits(payloadSize);
+ return typeTraits;
+ }
public static ITypeTraits serdeToTypeTrait(ISerializerDeserializer serde) {
if (serde instanceof IntegerSerializerDeserializer) {
diff --git a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
index 4113e9f..edcc6b2 100644
--- a/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
+++ b/hyracks-storage-am-btree/src/main/java/edu/uci/ics/hyracks/storage/am/btree/impls/BTree.java
@@ -1128,7 +1128,9 @@
return new BTreeAccessor(this);
}
- private class BTreeAccessor implements ITreeIndexAccessor {
+ // TODO: Class should be private. But currently we need to expose the
+ // setOpContext() API to the LSM Tree for it to work correctly.
+ public class BTreeAccessor implements ITreeIndexAccessor {
private BTree btree;
private BTreeOpContext ctx;
@@ -1167,5 +1169,13 @@
ctx.reset(IndexOp.DISKORDERSCAN);
btree.diskOrderScan(cursor, ctx);
}
+
+ // TODO: Ideally, this method should not exist. But we need it for
+ // the LSM tree to work correctly, so we can use the LSMOpContext inside
+ // a BTreeAccessor.
+ // Making the appropriate change will involve changing lots of code.
+ public void setOpContext(BTreeOpContext ctx) {
+ this.ctx = ctx;
+ }
}
}
diff --git a/hyracks-storage-am-lsmtree/pom.xml b/hyracks-storage-am-lsmtree/pom.xml
new file mode 100644
index 0000000..95f43d4
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/pom.xml
@@ -0,0 +1,49 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-lsmtree</artifactId>
+ <version>0.2.0-SNAPSHOT</version>
+
+ <parent>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks</artifactId>
+ <version>0.2.0-SNAPSHOT</version>
+ </parent>
+
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.0.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-test-support</artifactId>
+ <version>0.2.0-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-btree</artifactId>
+ <version>0.2.0-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>junit</groupId>
+ <artifactId>junit</artifactId>
+ <version>4.8.1</version>
+ <type>jar</type>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/DataGenThread.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/DataGenThread.java
new file mode 100644
index 0000000..494534d
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/DataGenThread.java
@@ -0,0 +1,86 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.datagen;
+
+import java.io.IOException;
+import java.util.Random;
+import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.LinkedBlockingQueue;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+
+/**
+ * Quick & dirty data generator for performance testing.
+ *
+ */
+public class DataGenThread extends Thread {
+ public final BlockingQueue<TupleBatch> tupleBatchQueue;
+ private final int maxNumBatches;
+ private final int maxOutstandingBatches;
+ private int numBatches;
+ private final boolean sorted;
+ private final Random rnd;
+
+ // maxOutstandingBatches pre-created tuple-batches for populating the queue.
+ private TupleBatch[] tupleBatches;
+ private int ringPos;
+
+ public DataGenThread(int maxNumBatches, int batchSize, int maxOutstandingBatches, int numConsumers, ISerializerDeserializer[] fieldSerdes, int payloadSize, int rndSeed, boolean sorted) {
+ this.maxNumBatches = maxNumBatches;
+ this.maxOutstandingBatches = maxOutstandingBatches;
+ this.sorted = sorted;
+ rnd = new Random(rndSeed);
+ tupleBatches = new TupleBatch[maxOutstandingBatches];
+ IFieldValueGenerator[] fieldGens = new IFieldValueGenerator[fieldSerdes.length];
+ for (int i = 0; i < fieldSerdes.length; i++) {
+ fieldGens[i] = getFieldGenFromSerde(fieldSerdes[i]);
+ }
+ for (int i = 0; i < maxOutstandingBatches; i++) {
+ tupleBatches[i] = new TupleBatch(batchSize, fieldGens, fieldSerdes, payloadSize);
+ }
+ // make sure we don't overwrite tuples that are in use by consumers.
+ // -1 because we first generate a new tuple, and then try to put it into the queue.
+ int capacity = Math.max(maxOutstandingBatches - numConsumers - 1, 1);
+ tupleBatchQueue = new LinkedBlockingQueue<TupleBatch>(capacity);
+ ringPos = 0;
+ }
+
+ @Override
+ public void run() {
+ while(numBatches < maxNumBatches) {
+ try {
+ tupleBatches[ringPos].generate();
+ tupleBatchQueue.put(tupleBatches[ringPos]);
+ } catch (IOException e) {
+ e.printStackTrace();
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ numBatches++;
+ ringPos++;
+ if (ringPos >= maxOutstandingBatches) {
+ ringPos = 0;
+ }
+ }
+ }
+
+ public IFieldValueGenerator getFieldGenFromSerde(ISerializerDeserializer serde) {
+ if (serde instanceof IntegerSerializerDeserializer) {
+ if (sorted) {
+ return new SortedIntegerFieldValueGenerator();
+ } else {
+ return new IntegerFieldValueGenerator(rnd);
+ }
+ }
+ System.out.println("NULL");
+ //if (serde instanceof Integer64SerializerDeserializer) {
+ // throw new UnsupportedOperationException("Binary comparator factory for Integer64 not implemented.");
+ //}
+ //if (serde instanceof FloatSerializerDeserializer) {
+ // return FloatBinaryComparatorFactory.INSTANCE;
+ //}
+ //if (serde instanceof DoubleSerializerDeserializer) {
+ // return DoubleBinaryComparatorFactory.INSTANCE;
+ //}
+ return null;
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/IFieldValueGenerator.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/IFieldValueGenerator.java
new file mode 100644
index 0000000..32e6ab3
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/IFieldValueGenerator.java
@@ -0,0 +1,5 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.datagen;
+
/**
 * Supplies a stream of synthetic field values for data generation.
 *
 * @param <T> Java type of the generated values
 */
public interface IFieldValueGenerator<T> {
    /** @return the next generated value */
    public T next();
}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/IntegerFieldValueGenerator.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/IntegerFieldValueGenerator.java
new file mode 100644
index 0000000..aa21959
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/IntegerFieldValueGenerator.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.datagen;
+
+import java.util.Random;
+
+public class IntegerFieldValueGenerator implements IFieldValueGenerator<Integer> {
+ protected final Random rnd;
+
+ public IntegerFieldValueGenerator(Random rnd) {
+ this.rnd = rnd;
+ }
+
+ @Override
+ public Integer next() {
+ return rnd.nextInt();
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/SortedIntegerFieldValueGenerator.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/SortedIntegerFieldValueGenerator.java
new file mode 100644
index 0000000..cf42490
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/SortedIntegerFieldValueGenerator.java
@@ -0,0 +1,17 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.datagen;
+
+public class SortedIntegerFieldValueGenerator implements IFieldValueGenerator<Integer> {
+ private int val = 0;
+
+ public SortedIntegerFieldValueGenerator() {
+ }
+
+ public SortedIntegerFieldValueGenerator(int startVal) {
+ val = startVal;
+ }
+
+ @Override
+ public Integer next() {
+ return val++;
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/TupleBatch.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/TupleBatch.java
new file mode 100644
index 0000000..7363854
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/TupleBatch.java
@@ -0,0 +1,33 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.datagen;
+
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+public class TupleBatch {
+ private final int size;
+ private final TupleGenerator[] tupleGens;
+
+ public TupleBatch(int size, IFieldValueGenerator[] fieldGens, ISerializerDeserializer[] fieldSerdes, int payloadSize) {
+ this.size = size;
+ tupleGens = new TupleGenerator[size];
+ for (int i = 0; i < size; i++) {
+ tupleGens[i] = new TupleGenerator(fieldGens, fieldSerdes, payloadSize);
+ }
+ }
+
+ public void generate() throws IOException {
+ for(TupleGenerator tupleGen : tupleGens) {
+ tupleGen.next();
+ }
+ }
+
+ public int size() {
+ return size;
+ }
+
+ public ITupleReference get(int ix) {
+ return tupleGens[ix].get();
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/TupleGenerator.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/TupleGenerator.java
new file mode 100644
index 0000000..c035f9d
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/datagen/TupleGenerator.java
@@ -0,0 +1,51 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.datagen;
+
+import java.io.DataOutput;
+import java.io.IOException;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+@SuppressWarnings({"rawtypes", "unchecked" })
+public class TupleGenerator {
+ protected final ISerializerDeserializer[] fieldSerdes;
+ protected final IFieldValueGenerator[] fieldGens;
+ protected final ArrayTupleBuilder tb;
+ protected final ArrayTupleReference tuple;
+ protected final byte[] payload;
+ protected final DataOutput tbDos;
+
+ public TupleGenerator(IFieldValueGenerator[] fieldGens, ISerializerDeserializer[] fieldSerdes, int payloadSize) {
+ this.fieldSerdes = fieldSerdes;
+ this.fieldGens = fieldGens;
+ tuple = new ArrayTupleReference();
+ if (payloadSize > 0) {
+ tb = new ArrayTupleBuilder(fieldSerdes.length + 1);
+ payload = new byte[payloadSize];
+ } else {
+ tb = new ArrayTupleBuilder(fieldSerdes.length);
+ payload = null;
+ }
+ tbDos = tb.getDataOutput();
+ }
+
+ public ITupleReference next() throws IOException {
+ tb.reset();
+ for (int i = 0; i < fieldSerdes.length; i++) {
+ fieldSerdes[i].serialize(fieldGens[i].next(), tbDos);
+ tb.addFieldEndOffset();
+ }
+ if (payload != null) {
+ tbDos.write(payload);
+ tb.addFieldEndOffset();
+ }
+ tuple.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+ return tuple;
+ }
+
+ public ITupleReference get() {
+ return tuple;
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/AbstractLSMTreeTest.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/AbstractLSMTreeTest.java
new file mode 100644
index 0000000..17de58c
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/AbstractLSMTreeTest.java
@@ -0,0 +1,76 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.Random;
+import java.util.logging.Logger;
+
+import org.junit.After;
+import org.junit.Before;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public abstract class AbstractLSMTreeTest {
+ protected static final Logger LOGGER = Logger.getLogger(AbstractLSMTreeTest.class.getName());
+ public static final long RANDOM_SEED = 50;
+
+ private static final int PAGE_SIZE = 256;
+ private static final int NUM_PAGES = 10;
+ private static final int MAX_OPEN_FILES = 10;
+ private static final int HYRACKS_FRAME_SIZE = 128;
+
+ protected IHyracksTaskContext ctx;
+ protected IBufferCache bufferCache;
+ protected int btreeFileId;
+
+ protected final Random rnd = new Random();
+ protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+ protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+ protected final static String sep = System.getProperty("file.separator");
+ protected String fileName;
+
+ @Before
+ public void setUp() throws HyracksDataException {
+ fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+ ctx = TestUtils.create(getHyracksFrameSize());
+ TestStorageManagerComponentHolder.init(getPageSize(), getNumPages(), getMaxOpenFiles());
+ bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ btreeFileId = fmp.lookupFileId(file);
+ bufferCache.openFile(btreeFileId);
+ rnd.setSeed(RANDOM_SEED);
+ }
+
+ @After
+ public void tearDown() throws HyracksDataException {
+ bufferCache.closeFile(btreeFileId);
+ bufferCache.close();
+ File f = new File(fileName);
+ f.deleteOnExit();
+ }
+
+ public int getPageSize() {
+ return PAGE_SIZE;
+ }
+
+ public int getNumPages() {
+ return NUM_PAGES;
+ }
+
+ public int getHyracksFrameSize() {
+ return HYRACKS_FRAME_SIZE;
+ }
+
+ public int getMaxOpenFiles() {
+ return MAX_OPEN_FILES;
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/BTreeFactory.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/BTreeFactory.java
new file mode 100644
index 0000000..1584f2c
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/BTreeFactory.java
@@ -0,0 +1,34 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+public class BTreeFactory {
+
+ private IBufferCache bufferCache;
+ private int fieldCount;
+ private MultiComparator cmp;
+ private ITreeIndexFrameFactory interiorFrameFactory;
+ private ITreeIndexFrameFactory leafFrameFactory;
+ private FreePageManagerFactory freePageManagerFactory;
+
+
+ public BTreeFactory(IBufferCache bufferCache, FreePageManagerFactory freePageManagerFactory, MultiComparator cmp,
+ int fieldCount, ITreeIndexFrameFactory interiorFrameFactory, ITreeIndexFrameFactory leafFrameFactory) {
+ this.bufferCache = bufferCache;
+ this.fieldCount = fieldCount;
+ this.cmp = cmp;
+ this.interiorFrameFactory = interiorFrameFactory;
+ this.leafFrameFactory = leafFrameFactory;
+ this.freePageManagerFactory = freePageManagerFactory;
+ }
+
+ public BTree createBTreeInstance(int fileId) {
+ return new BTree(bufferCache, fieldCount, cmp, freePageManagerFactory.createFreePageManager(fileId),
+ interiorFrameFactory, leafFrameFactory);
+ }
+
+
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/FreePageManagerFactory.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/FreePageManagerFactory.java
new file mode 100644
index 0000000..b88cc88
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/FreePageManagerFactory.java
@@ -0,0 +1,23 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+
+
+public class FreePageManagerFactory {
+
+ private final ITreeIndexMetaDataFrameFactory metaDataFrameFactory;
+ private final IBufferCache bufferCache;
+
+ public FreePageManagerFactory(IBufferCache bufferCache, ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
+ this.metaDataFrameFactory = metaDataFrameFactory;
+ this.bufferCache = bufferCache;
+ }
+
+ public IFreePageManager createFreePageManager(int fileId) {
+ return new LinkedListFreePageManager(bufferCache, fileId, 0, metaDataFrameFactory);
+ }
+
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/ILSMTreeTupleReference.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/ILSMTreeTupleReference.java
new file mode 100644
index 0000000..09ca355
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/ILSMTreeTupleReference.java
@@ -0,0 +1,7 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+
+public interface ILSMTreeTupleReference extends ITreeIndexTupleReference {
+ public boolean isDelete();
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InDiskTreeInfo.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InDiskTreeInfo.java
new file mode 100644
index 0000000..e8d1137
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InDiskTreeInfo.java
@@ -0,0 +1,16 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+
+public class InDiskTreeInfo {
+
+ private BTree bTree;
+
+ public InDiskTreeInfo(BTree bTree) {
+ this.bTree = bTree;
+ }
+
+ public BTree getBTree() {
+ return bTree;
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryBufferCache.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryBufferCache.java
new file mode 100644
index 0000000..1849f06
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryBufferCache.java
@@ -0,0 +1,146 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import java.nio.ByteBuffer;
+import java.util.concurrent.locks.ReadWriteLock;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCacheInternal;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPageInternal;
+import edu.uci.ics.hyracks.storage.common.file.BufferedFileHandle;
+
+public class InMemoryBufferCache implements IBufferCacheInternal {
+
+ private final int pageSize;
+ private final int numPages;
+ private final CachedPage[] cachedPages;
+
+ //Constructor
+ public InMemoryBufferCache(ICacheMemoryAllocator allocator, int pageSize, int numPages){
+
+ this.pageSize = pageSize;
+ this.numPages = numPages;
+ ByteBuffer[] buffers = allocator.allocate(this.pageSize, this.numPages);
+ cachedPages = new CachedPage[buffers.length];
+ for (int i = 0; i < buffers.length; ++i) {
+ cachedPages[i] = new CachedPage(i, buffers[i]);
+ }
+ }
+
+ @Override
+ public void createFile(FileReference fileRef) throws HyracksDataException {
+ // Do nothing
+ }
+
+ @Override
+ public void openFile(int fileId) throws HyracksDataException {
+ // Do nothing
+ }
+
+ @Override
+ public void closeFile(int fileId) throws HyracksDataException {
+ // Do nothing
+ }
+
+ @Override
+ public void deleteFile(int fileId) throws HyracksDataException {
+ // Do nothing
+ }
+
+ @Override
+ public ICachedPage tryPin(long dpid) throws HyracksDataException {
+ // Just call pin!
+ return null;
+ }
+
+ @Override
+ public ICachedPage pin(long dpid, boolean newPage){
+ return cachedPages[BufferedFileHandle.getPageId(dpid)];
+ }
+
+ @Override
+ public void unpin(ICachedPage page) throws HyracksDataException {
+ //Do Nothing
+ }
+
+ @Override
+ public int getPageSize() {
+ return pageSize;
+ }
+
+ @Override
+ public int getNumPages() {
+ return numPages;
+ }
+
+ @Override
+ public void close() {
+ // Do nothing
+ }
+
+ @Override
+ public ICachedPageInternal getPage(int cpid) {
+ return cachedPages[cpid];
+ }
+
+ private class CachedPage implements ICachedPageInternal {
+ private final int cpid;
+ private final ByteBuffer buffer;
+ private final ReadWriteLock latch;
+
+ public CachedPage(int cpid, ByteBuffer buffer) {
+ this.cpid = cpid;
+ this.buffer = buffer;
+ latch = new ReentrantReadWriteLock(true);
+ }
+
+ @Override
+ public ByteBuffer getBuffer() {
+ return buffer;
+ }
+
+ @Override
+ public Object getReplacementStrategyObject() {
+ //Do nothing
+ return null;
+ }
+
+ @Override
+ public boolean pinIfGoodVictim() {
+ //Do nothing
+ return false;
+ }
+
+ @Override
+ public int getCachedPageId() {
+ return cpid;
+ }
+
+ @Override
+ public void acquireReadLatch() {
+ latch.readLock().lock();
+ }
+
+ private void acquireWriteLatch(boolean markDirty) {
+ latch.writeLock().lock();
+ }
+
+ @Override
+ public void acquireWriteLatch() {
+ acquireWriteLatch(true);
+ }
+
+ @Override
+ public void releaseReadLatch() {
+ latch.readLock().unlock();
+ }
+
+ @Override
+ public void releaseWriteLatch() {
+ latch.writeLock().unlock();
+ }
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryBufferCacheFactory.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryBufferCacheFactory.java
new file mode 100644
index 0000000..21ec0da
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryBufferCacheFactory.java
@@ -0,0 +1,26 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+
+public class InMemoryBufferCacheFactory {
+
+ private IBufferCache bufferCache;
+ private final int pageSize;
+ private final int numPages;
+
+ public InMemoryBufferCacheFactory(int pageSize, int numPages) {
+ this.pageSize = pageSize;
+ this.numPages = numPages;
+ bufferCache = null;
+ }
+
+ public synchronized IBufferCache createInMemoryBufferCache() {
+ if (bufferCache == null) {
+ ICacheMemoryAllocator allocator = new HeapBufferAllocator();
+ bufferCache = new InMemoryBufferCache(allocator, pageSize, numPages);
+ }
+ return bufferCache;
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryBufferCacheTest.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryBufferCacheTest.java
new file mode 100644
index 0000000..5b1a409
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryBufferCacheTest.java
@@ -0,0 +1,117 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import static org.junit.Assert.fail;
+
+import org.junit.*;
+
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCacheInternal;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+public class InMemoryBufferCacheTest{
+
+ private static final int PAGE_SIZE = 256;
+ private static final int NUM_PAGES = 10;
+
+ //TEST InMemoryBufferCache.pin()
+ @Test
+ public void InMemoryBufferCacheTest01() throws Exception {
+
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ try {
+ ICachedPage memCachedPage = memBufferCache.pin(10, true);
+ if(memCachedPage != null) {
+ return;
+ }
+ else {
+ fail("fail to pin");
+ }
+ }
+ catch (ArrayIndexOutOfBoundsException e){
+ System.out.println("Catch exception: " + e);
+ return;
+ }
+ catch (Exception e) {
+ fail("Unexpected exception!");
+ }
+ }
+
+ //TEST InMemoryBufferCache.pin()
+ //expect ArrayIndexOutOfBoundsException
+ @Test
+ public void InMemoryBufferCacheTest02() throws Exception {
+
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ try {
+ ICachedPage memCachedPage = memBufferCache.pin(500, true);
+ if(memCachedPage != null) {
+ return;
+ }
+ else {
+ fail("fail to pin");
+ }
+ }
+ catch (ArrayIndexOutOfBoundsException e){
+ System.out.println("Catch exception: " + e);
+ return;
+ }
+ catch (Exception e) {
+ fail("Unexpected exception!");
+ }
+ }
+
+ //TEST InMemoryBufferCache.getPage()
+ @Test
+ public void InMemoryBufferCacheTest03() throws Exception {
+
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ try {
+ ICachedPage memCachedPage = ((IBufferCacheInternal) memBufferCache).getPage(20);
+ if(memCachedPage != null) {
+ return;
+ }
+ else {
+ fail("fail to pin");
+ }
+ }
+ catch (ArrayIndexOutOfBoundsException e){
+ System.out.println("Catch exception: " + e);
+ return;
+ }
+ catch (Exception e) {
+ fail("Unexpected exception!");
+ }
+ }
+
+ //TEST InMemoryBufferCache.getPage()
+ //expect ArrayIndexOutOfBoundsException
+ @Test
+ public void InMemoryBufferCacheTest04() throws Exception {
+
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ try {
+ ICachedPage memCachedPage = ((IBufferCacheInternal) memBufferCache).getPage(1000);
+ if(memCachedPage != null) {
+ return;
+ }
+ else {
+ fail("fail to pin");
+ }
+ }
+ catch (ArrayIndexOutOfBoundsException e){
+ System.out.println("Catch exception: " + e);
+ return;
+ }
+ catch (Exception e) {
+ fail("Unexpected exception!");
+ }
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryFreePageManager.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryFreePageManager.java
new file mode 100644
index 0000000..42c4bf3
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryFreePageManager.java
@@ -0,0 +1,87 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.PageAllocationException;
+
+public class InMemoryFreePageManager implements IFreePageManager{
+ private final int maxCapacity;
+ private int currentCapacity;
+ private final ITreeIndexMetaDataFrameFactory metaDataFrameFactory;
+
+ public InMemoryFreePageManager(int maxCapacity, ITreeIndexMetaDataFrameFactory metaDataFrameFactory) {
+ this.maxCapacity = maxCapacity-1; // Since the range of CacheArray in InMemoryBufferCache is 0 ~ maxCapacity-1
+ currentCapacity = 1;
+ this.metaDataFrameFactory = metaDataFrameFactory;
+ }
+
+ public int getCurrentCapacity() {
+ return currentCapacity;
+ }
+
+ @Override
+ public synchronized int getFreePage(ITreeIndexMetaDataFrame metaFrame)
+ throws HyracksDataException, PageAllocationException {
+
+ if(currentCapacity == maxCapacity) {
+ throw new PageAllocationException("In-mem tree capacity reaches max capacity");
+ }
+ currentCapacity++;
+ return currentCapacity;
+ }
+
+
+ @Override
+ public void addFreePage(ITreeIndexMetaDataFrame metaFrame, int freePage)
+ throws HyracksDataException {
+ System.out.println("InMemoryFreePageManager.addFreePage()");
+ }
+
+ @Override
+ public int getMaxPage(ITreeIndexMetaDataFrame metaFrame)
+ throws HyracksDataException {
+ System.out.println("InMemoryFreePageManager.getMaxPage()");
+ return 0;
+ }
+
+ @Override
+ public void init(ITreeIndexMetaDataFrame metaFrame, int currentMaxPage)
+ throws HyracksDataException {
+ currentCapacity = 1;
+ }
+
+ @Override
+ public ITreeIndexMetaDataFrameFactory getMetaDataFrameFactory() {
+ return metaDataFrameFactory;
+ }
+
+ @Override
+ public byte getMetaPageLevelIndicator() {
+ System.out.println("InMemoryFreePageManager.getMetaPageLevelIndicator()");
+ return 0;
+ }
+
+ @Override
+ public byte getFreePageLevelIndicator() {
+ System.out.println("InMemoryFreePageManager.getFreePageLevelIndicator()");
+ return 0;
+ }
+
+ @Override
+ public boolean isMetaPage(ITreeIndexMetaDataFrame metaFrame) {
+ System.out.println("InMemoryFreePageManager.isMetaPage()");
+ return false;
+ }
+
+ @Override
+ public boolean isFreePage(ITreeIndexMetaDataFrame metaFrame) {
+ System.out.println("InMemoryFreePageManager.isFreePage()");
+ return false;
+ }
+
+ public void reset(){
+ currentCapacity = 1;
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryFreePageManagerTest.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryFreePageManagerTest.java
new file mode 100644
index 0000000..d61735f
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/InMemoryFreePageManagerTest.java
@@ -0,0 +1,56 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import static org.junit.Assert.fail;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.PageAllocationException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+
+public class InMemoryFreePageManagerTest {
+
+    // Requests fewer pages than the manager's capacity; no exception expected.
+    @Test
+    public void InMemoryFreePageManagerTest01() throws Exception {
+
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+        IFreePageManager memFreePageManager = new InMemoryFreePageManager(10, metaFrameFactory);
+
+        try {
+            for (int i = 0; i < 5; i++) {
+                memFreePageManager.getFreePage(null);
+            }
+        } catch (Exception e) {
+            fail("Unexpected exception: " + e);
+        }
+    }
+
+    // Requests more pages than the max capacity;
+    // a PageAllocationException is REQUIRED, so the test now fails when the
+    // exception is not thrown (previously it passed silently in that case).
+    @Test
+    public void InMemoryFreePageManagerTest02() throws Exception {
+
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+        IFreePageManager memFreePageManager = new InMemoryFreePageManager(10, metaFrameFactory);
+
+        try {
+            for (int i = 0; i < 20; i++) {
+                memFreePageManager.getFreePage(null);
+            }
+        } catch (PageAllocationException e) {
+            // Expected: capacity exhausted.
+            return;
+        } catch (Exception e) {
+            fail("Unexpected exception type: " + e);
+        }
+        fail("Expected PageAllocationException was not thrown");
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMEntireTupleWriter.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMEntireTupleWriter.java
new file mode 100644
index 0000000..793ec04
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMEntireTupleWriter.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
+
+/**
+ * Tuple writer that copies a tuple's exact byte image from its source frame
+ * instead of re-encoding it field by field. Requires tuples to be
+ * LSMTypeAwareTupleReference instances so the physical start offset is known.
+ */
+public class LSMEntireTupleWriter extends TypeAwareTupleWriter {
+    public LSMEntireTupleWriter(ITypeTraits[] typeTraits){
+        super(typeTraits);
+    }
+
+    /**
+     * One null-flag bit per field plus one extra bit that marks the tuple as
+     * an insert or a delete (the LSM antimatter flag).
+     */
+    @Override
+    protected int getNullFlagsBytes(ITupleReference tuple) {
+        //+1.0 is for insert/delete tuple checking
+        return (int) Math.ceil(((double) tuple.getFieldCount() + 1.0) / 8.0);
+    }
+
+    /** Raw byte copy of the whole tuple; returns the number of bytes written. */
+    @Override
+    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
+        int tupleSize = this.bytesRequired(tuple);
+        byte[] buf = tuple.getFieldData(0);
+        int tupleStartOff = ((LSMTypeAwareTupleReference)tuple).getTupleStart();
+        System.arraycopy(buf, tupleStartOff, targetBuf, targetOff, tupleSize);
+        return tupleSize;
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMEntireTupleWriterFactory.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMEntireTupleWriterFactory.java
new file mode 100644
index 0000000..de7befa
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMEntireTupleWriterFactory.java
@@ -0,0 +1,20 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+
+/**
+ * Factory for {@link LSMEntireTupleWriter} instances -- writers that copy a
+ * tuple's exact byte image rather than re-encoding individual fields.
+ */
+public class LSMEntireTupleWriterFactory extends TypeAwareTupleWriterFactory {
+    private static final long serialVersionUID = 1L;
+    // Assigned once in the constructor, hence final. Kept locally; the
+    // superclass copy is presumably not accessible here -- TODO confirm.
+    private final ITypeTraits[] typeTraits;
+
+    public LSMEntireTupleWriterFactory(ITypeTraits[] typeTraits) {
+        super(typeTraits);
+        this.typeTraits = typeTraits;
+    }
+
+    @Override
+    public ITreeIndexTupleWriter createTupleWriter() {
+        return new LSMEntireTupleWriter(typeTraits);
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMPriorityQueueComparator.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMPriorityQueueComparator.java
new file mode 100644
index 0000000..1bb5abb
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMPriorityQueueComparator.java
@@ -0,0 +1,36 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import java.util.Comparator;
+
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+/**
+ * Orders priority-queue elements by tuple key; ties between components are
+ * broken by cursor index so the element from the cursor with the smaller
+ * index wins.
+ */
+public class LSMPriorityQueueComparator implements Comparator<LSMPriorityQueueElement> {
+
+    private final MultiComparator cmp;
+
+    public LSMPriorityQueueComparator(MultiComparator cmp) {
+        this.cmp = cmp;
+    }
+
+    @Override
+    public int compare(LSMPriorityQueueElement elementA, LSMPriorityQueueElement elementB) {
+        // Comparators only guarantee the SIGN of the result, not literal
+        // +/-1, so test against zero. The old checks (result == 1 /
+        // result == -1) dropped any other non-zero value into the
+        // tie-breaking branch, mis-ordering distinct keys.
+        int result = cmp.compare(elementA.getTuple(), elementB.getTuple());
+        if (result != 0) {
+            return result;
+        }
+        // Equal keys: break the tie by cursor index. Returning 0 for equal
+        // indexes preserves the Comparator anti-symmetry contract, which the
+        // old code violated by returning -1 for both orderings.
+        int indexA = elementA.getCursorIndex();
+        int indexB = elementB.getCursorIndex();
+        return (indexA < indexB) ? -1 : ((indexA == indexB) ? 0 : 1);
+    }
+
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMPriorityQueueElement.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMPriorityQueueElement.java
new file mode 100644
index 0000000..c8e3241
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMPriorityQueueElement.java
@@ -0,0 +1,25 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+
+/**
+ * Mutable holder pairing a tuple with the index of the cursor that produced
+ * it; reset() lets instances be pooled across priority-queue operations.
+ */
+public class LSMPriorityQueueElement {
+
+    // Current tuple delivered by the owning cursor.
+    private ITupleReference tuple;
+    // Index of the owning cursor in the LSM component cursor array.
+    private int cursorIndex;
+
+    public LSMPriorityQueueElement(ITupleReference tuple, int cursorIndex) {
+        reset(tuple, cursorIndex);
+    }
+
+    /** Re-targets this element to a new tuple/cursor pair. */
+    public void reset(ITupleReference tuple, int cursorIndex) {
+        this.tuple = tuple;
+        this.cursorIndex = cursorIndex;
+    }
+
+    public ITupleReference getTuple() {
+        return tuple;
+    }
+
+    public int getCursorIndex() {
+        return cursorIndex;
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTree.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTree.java
new file mode 100644
index 0000000..fb5d741
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTree.java
@@ -0,0 +1,583 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.File;
+import java.util.LinkedList;
+import java.util.ListIterator;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeDuplicateKeyException;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndex;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IndexType;
+import edu.uci.ics.hyracks.storage.am.common.api.PageAllocationException;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+
+public class LSMTree implements ITreeIndex {
+
+ private final IBufferCache bufferCache;
+ private BTree memBTree;
+ private String fileName;
+ private int fileId;
+ private boolean created;
+
+ private final IFreePageManager memFreePageManager;
+ private final ITreeIndexFrameFactory interiorFrameFactory;
+ private final ITreeIndexFrameFactory insertLeafFrameFactory;
+ private final ITreeIndexFrameFactory deleteLeafFrameFactory;
+ private final MultiComparator cmp;
+
+ // TODO: change to private, it's public only for LSMTreeSearchTest
+ public LinkedList<InDiskTreeInfo> inDiskTreeInfoList;
+ private LinkedList<InDiskTreeInfo> mergedInDiskTreeInfoList;
+ private int inDiskTreeCounter;
+ private final BTreeFactory bTreeFactory;
+ private final IFileMapManager fileMapManager;
+ private int threadReferenceCounter;
+ private boolean flushFlag;
+
+    /**
+     * @param memCache buffer cache backing the in-memory component
+     * @param bufferCache buffer cache backing the on-disk components
+     * @param cmp key comparator shared by all components
+     * @param memFreePageManager page manager whose PageAllocationException
+     *            signals that the in-memory component is full
+     * @param bTreeFactory factory used to instantiate on-disk BTrees
+     */
+    public LSMTree(IBufferCache memCache, IBufferCache bufferCache, int fieldCount, MultiComparator cmp,
+            IFreePageManager memFreePageManager, ITreeIndexFrameFactory interiorFrameFactory,
+            ITreeIndexFrameFactory insertLeafFrameFactory, ITreeIndexFrameFactory deleteLeafFrameFactory,
+            BTreeFactory bTreeFactory, IFileMapManager fileMapManager) {
+        this.bufferCache = bufferCache;
+        this.cmp = cmp;
+        this.interiorFrameFactory = interiorFrameFactory;
+        this.insertLeafFrameFactory = insertLeafFrameFactory;
+        this.deleteLeafFrameFactory = deleteLeafFrameFactory;
+        this.memFreePageManager = memFreePageManager;
+        this.bTreeFactory = bTreeFactory;
+        this.inDiskTreeInfoList = new LinkedList<InDiskTreeInfo>();
+        this.inDiskTreeCounter = 0;
+        this.fileMapManager = fileMapManager;
+        this.threadReferenceCounter = 0;
+        this.created = false;
+        this.flushFlag = false;
+
+        // NOTE(review): fileId has not been assigned yet at this point (it is
+        // set in create()/open()), so this looks up the name of file id 0 --
+        // confirm this is intended and not a latent bug.
+        try {
+            this.fileName = this.fileMapManager.lookupFileName(this.fileId).toString();
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+        memBTree = new BTree(memCache, fieldCount, cmp, memFreePageManager, interiorFrameFactory,
+                insertLeafFrameFactory);
+    }
+
+    /**
+     * Creates the in-memory component on first use and records the backing
+     * file id; repeated calls are ignored.
+     */
+    @Override
+    public void create(int indexFileId) throws HyracksDataException {
+        if (created) {
+            return;
+        }
+        memBTree.create(indexFileId);
+        this.fileId = indexFileId;
+        created = true;
+    }
+
+    /** Opens the in-memory component and records the backing file id. */
+    @Override
+    public void open(int indexFileId) {
+        memBTree.open(indexFileId);
+        this.fileId = indexFileId;
+    }
+
+    /** Closes the in-memory component; -1 marks the tree as closed. */
+    @Override
+    public void close() {
+        memBTree.close();
+        this.fileId = -1;
+    }
+
+    /**
+     * Routes an insert or delete into the in-memory BTree under the LSM flush
+     * protocol:
+     * 1) Spin until no flush is pending, then register as an active writer
+     *    (threadReferenceCounter). NOTE(review): this is a busy-wait;
+     *    consider wait/notify.
+     * 2) Attempt the insert. A duplicate key means an entry (possibly a
+     *    delete marker) already exists for this key, so it is overwritten
+     *    via update rather than rejected.
+     * 3) PageAllocationException means the in-memory component is full:
+     *    request a flush; the thread that drops the reference count to zero
+     *    performs the flush itself, everyone else retries recursively.
+     */
+    private void lsmPerformOp(ITupleReference tuple, LSMTreeOpContext ctx) throws Exception {
+        boolean continuePerformOp = false;
+        try {
+            while (continuePerformOp == false) {
+                synchronized (this) {
+                    if (!flushFlag) {
+                        threadReferenceCounter++;
+                        continuePerformOp = true;
+                    }
+                }
+            }
+            ctx.memBtreeAccessor.insert(tuple);
+            decreaseThreadReferenceCounter();
+        } catch (BTreeDuplicateKeyException e) {
+            ctx.reset(IndexOp.UPDATE);
+            // We don't need to deal with a nonexistent key here, because a
+            // deleter will actually update the key and it's value, and not
+            // delete it from the BTree.
+            ctx.memBtreeAccessor.update(tuple);
+            decreaseThreadReferenceCounter();
+        } catch (PageAllocationException e) {
+            synchronized (this) {
+                // If flushFlag is false it means we are the first inserter to
+                // trigger the flush. If flushFlag is already set to true,
+                // there's no harm in setting it to true again.
+                flushFlag = true;
+                threadReferenceCounter--;
+                if (threadReferenceCounter == 0) {
+                    // Last active writer: flush, then retry this operation on
+                    // the freshly emptied in-memory component.
+                    flushInMemoryBtree();
+                    ctx.reset();
+                    ctx.memBtreeAccessor.insert(tuple);
+                    flushFlag = false;
+                    return;
+                } else if (threadReferenceCounter < 0) {
+                    throw new Error("Thread reference counter is below zero. This indicates a programming error!");
+                }
+            }
+            // Other writers are still active; retry (will wait for the flush).
+            lsmPerformOp(tuple, ctx);
+            return;
+        }
+    }
+
+    /**
+     * Deregisters a writer. If a flush was requested (flushFlag) and this was
+     * the last active writer, this thread performs the flush itself before
+     * clearing the flag.
+     */
+    public void decreaseThreadReferenceCounter() throws Exception {
+        synchronized (this) {
+            threadReferenceCounter--;
+            if (flushFlag == true) {
+                if (threadReferenceCounter == 0) {
+                    flushInMemoryBtree();
+                    flushFlag = false;
+                    return;
+                } else if (threadReferenceCounter < 0) {
+                    throw new Error("Thread reference counter is below zero. This indicates a programming error!");
+                }
+            }
+        }
+    }
+
+    /**
+     * Builds a unique file name for the next on-disk component by appending a
+     * monotonically increasing counter to this tree's base file name.
+     * NOTE(review): inDiskTreeCounter++ is not synchronized; if flush and
+     * merge can run concurrently this increment needs to be made atomic --
+     * confirm.
+     */
+    private String getNextFileName() {
+        int newDiskBTreeId = inDiskTreeCounter++;
+        // Plain concatenation replaces the redundant new String(...).concat(...)
+        // dance; the result is the same "<fileName>-<counter>" string.
+        return fileName + "-" + newDiskBTreeId;
+    }
+
+    /**
+     * Flushes the in-memory component: scans every tuple (including delete
+     * markers), bulk-loads them into a brand-new on-disk BTree at full fill
+     * factor, clears the in-memory component, and prepends the new component
+     * to inDiskTreeInfoList (newest component first). Expected to run only
+     * while no other writers are active (see lsmPerformOp).
+     */
+    public void flushInMemoryBtree() throws Exception {
+        // read the tuple from InMemoryBtree, and bulkload into the disk
+
+        // scan
+        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(
+                (IBTreeLeafFrame) insertLeafFrameFactory.createFrame(), false);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        ITreeIndexAccessor memBtreeAccessor = memBTree.createAccessor();
+        memBtreeAccessor.search(scanCursor, nullPred);
+
+        // Create a new in-Disk BTree, which have full fillfactor.
+
+        // Register the BTree information into system.
+        FileReference file = new FileReference(new File(getNextFileName()));
+        // TODO: Delete the file during cleanup.
+        bufferCache.createFile(file);
+        int newDiskBTreeId = fileMapManager.lookupFileId(file);
+        // TODO: Close the file during cleanup.
+        bufferCache.openFile(newDiskBTreeId);
+
+        // Create new in-Disk Btree.
+        BTree inDiskBTree = this.bTreeFactory.createBTreeInstance(newDiskBTreeId);
+        inDiskBTree.create(newDiskBTreeId);
+        // TODO: Close the BTree during cleanup.
+        inDiskBTree.open(newDiskBTreeId);
+
+        // BulkLoad the tuples from the in-memory tree into the new disk BTree.
+        IIndexBulkLoadContext bulkLoadCtx = inDiskBTree.beginBulkLoad(1.0f);
+        try {
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference frameTuple = scanCursor.getTuple();
+                inDiskBTree.bulkLoadAddTuple(frameTuple, bulkLoadCtx);
+            }
+        } finally {
+            scanCursor.close();
+        }
+        inDiskBTree.endBulkLoad(bulkLoadCtx);
+
+        // After BulkLoading, Clear the in-memTree
+        resetInMemoryTree();
+
+        // Newest component goes first so searches see fresher data earlier.
+        InDiskTreeInfo newLinkedListNode = new InDiskTreeInfo(inDiskBTree);
+        synchronized (inDiskTreeInfoList) {
+            inDiskTreeInfoList.addFirst(newLinkedListNode);
+        }
+    }
+
+    /**
+     * Inserts via the LSM write path. Failures are propagated to the caller;
+     * the old code swallowed every exception with printStackTrace(), which
+     * silently dropped failed inserts despite the declared checked exceptions.
+     */
+    private void insert(ITupleReference tuple, LSMTreeOpContext ctx) throws HyracksDataException, TreeIndexException,
+            PageAllocationException {
+        try {
+            lsmPerformOp(tuple, ctx);
+        } catch (HyracksDataException e) {
+            throw e;
+        } catch (TreeIndexException e) {
+            throw e;
+        } catch (PageAllocationException e) {
+            throw e;
+        } catch (Exception e) {
+            // Wrap anything unexpected so callers still see the failure.
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /**
+     * Deletes via the LSM write path (a delete is an insert of a delete
+     * marker). Failures are propagated; the old code swallowed every
+     * exception with printStackTrace(), silently dropping failed deletes.
+     */
+    private void delete(ITupleReference tuple, LSMTreeOpContext ctx) throws HyracksDataException, TreeIndexException,
+            PageAllocationException {
+        try {
+            lsmPerformOp(tuple, ctx);
+        } catch (HyracksDataException e) {
+            throw e;
+        } catch (TreeIndexException e) {
+            throw e;
+        } catch (PageAllocationException e) {
+            throw e;
+        } catch (Exception e) {
+            // Wrap anything unexpected so callers still see the failure.
+            throw new HyracksDataException(e);
+        }
+    }
+
+    /**
+     * External bulk loading is not supported by the LSM tree; use the normal
+     * insert path. Previously this returned null, which would only surface
+     * later as a NullPointerException; failing fast here matches how other
+     * unsupported operations (update, diskOrderScan) are handled.
+     */
+    @Override
+    public IIndexBulkLoadContext beginBulkLoad(float fillFactor) throws TreeIndexException, HyracksDataException,
+            PageAllocationException {
+        throw new UnsupportedOperationException("BulkLoad not supported by LSMTree");
+    }
+
+    /** Unsupported: see beginBulkLoad(). Previously a silent no-op. */
+    @Override
+    public void bulkLoadAddTuple(ITupleReference tuple, IIndexBulkLoadContext ictx) throws HyracksDataException,
+            PageAllocationException {
+        throw new UnsupportedOperationException("BulkLoad not supported by LSMTree");
+    }
+
+    /** Unsupported: see beginBulkLoad(). Previously a silent no-op. */
+    @Override
+    public void endBulkLoad(IIndexBulkLoadContext ictx) throws HyracksDataException {
+        throw new UnsupportedOperationException("BulkLoad not supported by LSMTree");
+    }
+
+    /**
+     * Opens a range search across all LSM components: cursor slot 0 is the
+     * in-memory BTree, slots 1..n are the on-disk components (newest first,
+     * matching inDiskTreeInfoList order). Results are merged through a
+     * priority queue keyed on tuple and cursor index.
+     * NOTE(review): threadReferenceCounter is incremented here but no
+     * decrement is visible in this method -- presumably the cursor's close()
+     * releases it; confirm, otherwise a pending flush can stall forever.
+     */
+    public void search(ITreeIndexCursor cursor, RangePredicate pred, LSMTreeOpContext ctx) throws Exception {
+        int numberOfInDiskTrees;
+        ListIterator<InDiskTreeInfo> inDiskTreeInfoListIterator;
+        boolean continuePerformOp = false;
+
+        ctx.reset(IndexOp.SEARCH);
+
+        // Wait out any pending flush before registering as an active reader.
+        while (continuePerformOp == false) {
+            synchronized (this) {
+                if (!flushFlag) {
+                    threadReferenceCounter++;
+                    continuePerformOp = true;
+                }
+            }
+        }
+
+        // in-disk: snapshot the component count and iterator under the lock.
+        synchronized (inDiskTreeInfoList) {
+            numberOfInDiskTrees = inDiskTreeInfoList.size();
+            inDiskTreeInfoListIterator = inDiskTreeInfoList.listIterator();
+        }
+
+        // +1 for the in-memory component's cursor slot.
+        LSMTreeCursorInitialState initialState = new LSMTreeCursorInitialState(numberOfInDiskTrees + 1,
+                insertLeafFrameFactory, cmp, this);
+        cursor.open(initialState, pred);
+
+        BTree[] onDiskBtrees = new BTree[numberOfInDiskTrees];
+        ITreeIndexAccessor[] onDiskBtreeAccessors = new ITreeIndexAccessor[numberOfInDiskTrees];
+
+        for (int i = 0; i < numberOfInDiskTrees; i++) {
+            // get btree instances for in-disk trees
+            if (inDiskTreeInfoListIterator.hasNext()) {
+                onDiskBtrees[i] = ((InDiskTreeInfo) inDiskTreeInfoListIterator.next()).getBTree();
+            } else {
+                throw new HyracksDataException("Cannot find in-disk tree instance");
+            }
+            onDiskBtreeAccessors[i] = onDiskBtrees[i].createAccessor();
+            onDiskBtreeAccessors[i].search(((LSMTreeRangeSearchCursor) cursor).getCursor(i + 1), pred);
+        }
+
+        // in-memory
+        ctx.memBtreeAccessor.search(((LSMTreeRangeSearchCursor) cursor).getCursor(0), pred);
+
+        LSMPriorityQueueComparator LSMPriorityQueueCmp = new LSMPriorityQueueComparator(cmp);
+        ((LSMTreeRangeSearchCursor) cursor).initPriorityQueue(numberOfInDiskTrees + 1, LSMPriorityQueueCmp);
+    }
+
+    /**
+     * Merges all current on-disk components into a single new on-disk BTree.
+     * The cursor setup mirrors search(), except the in-memory component is
+     * excluded. NOTE(review): the merged list (mergedInDiskTreeInfoList) is
+     * built but never swapped in (see the TODO below), so merge currently has
+     * no effect on the active component list -- the implementation is
+     * unfinished.
+     */
+    public void merge() throws Exception {
+        ITreeIndexCursor rangeCursor = new LSMTreeRangeSearchCursor();
+        RangePredicate rangePred = new RangePredicate(true, null, null, true, true, null, null);
+
+        // Cursor setting -- almost the same as search, only difference is
+        // "no cursor for in-memory tree"
+        int numberOfInDiskTrees;
+        ListIterator<InDiskTreeInfo> inDiskTreeInfoListIterator;
+        boolean continuePerformOp = false;
+        // Wait out any pending flush before registering as active.
+        while (continuePerformOp == false) {
+            synchronized (this) {
+                if (!flushFlag) {
+                    threadReferenceCounter++;
+                    continuePerformOp = true;
+                }
+            }
+        }
+
+        synchronized (inDiskTreeInfoList) {
+            numberOfInDiskTrees = inDiskTreeInfoList.size();
+            inDiskTreeInfoListIterator = inDiskTreeInfoList.listIterator();
+        }
+
+        LSMTreeCursorInitialState initialState = new LSMTreeCursorInitialState(numberOfInDiskTrees,
+                insertLeafFrameFactory, cmp, this);
+        rangeCursor.open(initialState, rangePred);
+
+        BTree[] onDiskBtrees = new BTree[numberOfInDiskTrees];
+        ITreeIndexAccessor[] onDiskBtreeAccessors = new ITreeIndexAccessor[numberOfInDiskTrees];
+
+        for (int i = 0; i < numberOfInDiskTrees; i++) {
+            // get btree instances for in-disk trees
+            if (inDiskTreeInfoListIterator.hasNext()) {
+                onDiskBtrees[i] = ((InDiskTreeInfo) inDiskTreeInfoListIterator.next()).getBTree();
+            } else {
+                throw new Exception("Cannot find in-disk tree instance");
+            }
+            onDiskBtreeAccessors[i] = onDiskBtrees[i].createAccessor();
+            onDiskBtreeAccessors[i].search(((LSMTreeRangeSearchCursor) rangeCursor).getCursor(i), rangePred);
+        }
+
+        LSMPriorityQueueComparator LSMPriorityQueueCmp = new LSMPriorityQueueComparator(cmp);
+        ((LSMTreeRangeSearchCursor) rangeCursor).initPriorityQueue(numberOfInDiskTrees, LSMPriorityQueueCmp);
+        // End of Cursor setting
+
+        // Create a new Merged BTree, which have full fillfactor.
+        // Register the BTree information into system.
+        // TODO: change the naming schema for the new tree
+        FileReference file = new FileReference(new File(getNextFileName()));
+        // TODO: Delete the file during cleanup.
+        bufferCache.createFile(file);
+        int mergedBTreeId = fileMapManager.lookupFileId(file);
+        // TODO: Close the file during cleanup.
+        bufferCache.openFile(mergedBTreeId);
+
+        // Create new in-Disk BTree.
+        BTree mergedBTree = this.bTreeFactory.createBTreeInstance(mergedBTreeId);
+        mergedBTree.create(mergedBTreeId);
+        // TODO: Close the BTree during cleanup.
+        mergedBTree.open(mergedBTreeId);
+
+        // BulkLoad the tuples from the in-memory tree into the new disk BTree.
+        IIndexBulkLoadContext bulkLoadCtx = mergedBTree.beginBulkLoad(1.0f);
+
+        try {
+            while (rangeCursor.hasNext()) {
+                rangeCursor.next();
+                ITupleReference frameTuple = rangeCursor.getTuple();
+                mergedBTree.bulkLoadAddTuple(frameTuple, bulkLoadCtx);
+            }
+        } finally {
+            rangeCursor.close();
+        }
+
+        mergedBTree.endBulkLoad(bulkLoadCtx);
+
+        InDiskTreeInfo newLinkedListNode = new InDiskTreeInfo(mergedBTree);
+        // NOTE(review): tempInDiskTreeInfo is only used by the commented-out
+        // swap below; remove it once the swap is implemented or abandoned.
+        LinkedList<InDiskTreeInfo> tempInDiskTreeInfo;
+        synchronized (inDiskTreeInfoList) {
+            mergedInDiskTreeInfoList = (LinkedList<InDiskTreeInfo>) inDiskTreeInfoList.clone();
+            // Remove the redundant trees, and add the new merged tree in the
+            // last off the list
+            for (int i = 0; i < numberOfInDiskTrees; i++) {
+                mergedInDiskTreeInfoList.removeLast();
+            }
+            mergedInDiskTreeInfoList.addLast(newLinkedListNode);
+
+            // TODO: to swap the linkedlists
+            /*
+             * tempInDiskTreeInfo = inDiskTreeInfoList; inDiskTreeInfoList =
+             * mergedInDiskTreeInfoList; mergedInDiskTreeInfoList =
+             * tempInDiskTreeInfo;
+             */
+            // TODO: to swap the searchThreadCounters
+
+            // 1. should add the searcherReferenceCounter
+            // 2. Wrap the searcherReferenceCounter as Integer object,
+            // otherwise, the reference cannot be swapped
+            // 3. modify the "decrease counter part" in search(), and let the
+            // searcher remember the localReferences
+
+        }
+        // TODO: to wake up the cleaning thread
+    }
+
+    // The following accessors simply delegate to the in-memory component.
+    @Override
+    public ITreeIndexFrameFactory getLeafFrameFactory() {
+        return memBTree.getLeafFrameFactory();
+    }
+
+    @Override
+    public ITreeIndexFrameFactory getInteriorFrameFactory() {
+        return memBTree.getInteriorFrameFactory();
+    }
+
+    @Override
+    public IFreePageManager getFreePageManager() {
+        return memBTree.getFreePageManager();
+    }
+
+    @Override
+    public int getFieldCount() {
+        return memBTree.getFieldCount();
+    }
+
+    @Override
+    public int getRootPageId() {
+        return memBTree.getRootPageId();
+    }
+
+    @Override
+    public IndexType getIndexType() {
+        return memBTree.getIndexType();
+    }
+
+    /**
+     * Empties the in-memory component after a flush: reclaims all of its
+     * pages at once, then re-creates an empty BTree on the same file id.
+     */
+    public void resetInMemoryTree() throws HyracksDataException {
+        ((InMemoryFreePageManager) memFreePageManager).reset();
+        memBTree.create(fileId);
+    }
+
+    // This function is just for testing flushInMemoryBtree()
+    /**
+     * Debug helper: scans the on-disk BTree at the given position in
+     * inDiskTreeInfoList and prints each tuple's second integer field plus its
+     * insert/delete flag. Assumes tuples carry integer-serialized fields.
+     */
+    public void scanDiskTree(int treeIndex) throws Exception {
+        BTree onDiskBtree = inDiskTreeInfoList.get(treeIndex).getBTree();
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(
+                (IBTreeLeafFrame) this.insertLeafFrameFactory.createFrame(), false);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        ITreeIndexAccessor onDiskBtreeAccessor = onDiskBtree.createAccessor();
+        onDiskBtreeAccessor.search(scanCursor, nullPred);
+        try {
+            int scanTupleIndex = 0;
+            // A single hasNext() in the loop condition suffices; the old code
+            // redundantly called hasNext() again inside the loop body.
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference frameTuple = scanCursor.getTuple();
+                int numPrintFields = Math.min(frameTuple.getFieldCount(), recDescSers.length);
+
+                for (int i = 0; i < numPrintFields; i++) {
+                    ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(i),
+                            frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+                    DataInput dataIn = new DataInputStream(inStream);
+                    Object o = recDescSers[i].deserialize(dataIn);
+
+                    if (i == 1)
+                        System.out.printf("LSMTree.scanDiskTree(): scanTupleIndex[%d]; Value is [%d]; ",
+                                scanTupleIndex, Integer.parseInt(o.toString()));
+
+                }
+
+                if (((LSMTypeAwareTupleReference) frameTuple).isDelete()) {
+                    System.out.printf(" DELETE\n");
+                } else {
+                    System.out.printf(" INSERT\n");
+                }
+                scanTupleIndex++;
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            scanCursor.close();
+        }
+    }
+
+    // This function is just for testing flushInMemoryBtree()
+    /**
+     * Debug helper: scans the in-memory BTree and prints each tuple's second
+     * integer field plus its insert/delete flag. Assumes tuples carry
+     * integer-serialized fields.
+     */
+    public void scanInMemoryTree() throws Exception {
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+        ITreeIndexCursor scanCursor = new BTreeRangeSearchCursor(
+                (IBTreeLeafFrame) this.insertLeafFrameFactory.createFrame(), false);
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        ITreeIndexAccessor inMemBtreeAccessor = memBTree.createAccessor();
+        inMemBtreeAccessor.search(scanCursor, nullPred);
+        try {
+            int scanTupleIndex = 0;
+            // A single hasNext() in the loop condition suffices; the old code
+            // redundantly called hasNext() again inside the loop body.
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference frameTuple = scanCursor.getTuple();
+                int numPrintFields = Math.min(frameTuple.getFieldCount(), recDescSers.length);
+                for (int i = 0; i < numPrintFields; i++) {
+                    ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(i),
+                            frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+                    DataInput dataIn = new DataInputStream(inStream);
+                    Object o = recDescSers[i].deserialize(dataIn);
+                    if (i == 1)
+                        System.out.printf("LSMTree.scanMemoryTree(): scanTupleIndex[%d]; Value is [%d]; ",
+                                scanTupleIndex, Integer.parseInt(o.toString()));
+                }
+                if (((LSMTypeAwareTupleReference) frameTuple).isDelete()) {
+                    System.out.printf(" DELETE\n");
+                } else {
+                    System.out.printf(" INSERT\n");
+                }
+                scanTupleIndex++;
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            scanCursor.close();
+        }
+    }
+
+    /**
+     * Builds a per-accessor operation context bundling the in-memory BTree
+     * accessor, the frame factories, and a metadata frame.
+     */
+    public LSMTreeOpContext createOpContext() {
+        return new LSMTreeOpContext((BTree.BTreeAccessor) memBTree.createAccessor(), insertLeafFrameFactory,
+                deleteLeafFrameFactory, interiorFrameFactory, memFreePageManager.getMetaDataFrameFactory()
+                        .createFrame(), cmp);
+    }
+
+    /** Each accessor carries its own context; create one per thread. */
+    @Override
+    public ITreeIndexAccessor createAccessor() {
+        return new LSMTreeIndexAccessor(this);
+    }
+
+    /**
+     * Per-thread accessor. insert/delete/search are routed through the LSM
+     * write/read paths; update and diskOrderScan are unsupported (updates are
+     * expressed as insert-with-overwrite inside lsmPerformOp instead).
+     */
+    private class LSMTreeIndexAccessor implements ITreeIndexAccessor {
+        private LSMTree lsmTree;
+        // Reused operation context; makes this accessor single-threaded.
+        private LSMTreeOpContext ctx;
+
+        public LSMTreeIndexAccessor(LSMTree lsmTree) {
+            this.lsmTree = lsmTree;
+            this.ctx = lsmTree.createOpContext();
+        }
+
+        @Override
+        public void insert(ITupleReference tuple) throws HyracksDataException, TreeIndexException,
+                PageAllocationException {
+            ctx.reset(IndexOp.INSERT);
+            lsmTree.insert(tuple, ctx);
+        }
+
+        @Override
+        public void update(ITupleReference tuple) throws HyracksDataException, TreeIndexException,
+                PageAllocationException {
+            throw new UnsupportedOperationException("Update not supported by LSMTree");
+        }
+
+        @Override
+        public void delete(ITupleReference tuple) throws HyracksDataException, TreeIndexException,
+                PageAllocationException {
+            ctx.reset(IndexOp.DELETE);
+            lsmTree.delete(tuple, ctx);
+        }
+
+        @Override
+        public void search(ITreeIndexCursor cursor, ISearchPredicate searchPred) throws HyracksDataException,
+                TreeIndexException, PageAllocationException {
+            ctx.reset(IndexOp.SEARCH);
+            // TODO: fix exception handling throughout LSM tree.
+            try {
+                lsmTree.search(cursor, (RangePredicate) searchPred, ctx);
+            } catch (Exception e) {
+                throw new HyracksDataException(e);
+            }
+        }
+
+        @Override
+        public void diskOrderScan(ITreeIndexCursor cursor) throws HyracksDataException {
+            throw new UnsupportedOperationException("DiskOrderScan not supported by LSMTree");
+        }
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeCursorInitialState.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeCursorInitialState.java
new file mode 100644
index 0000000..5391d9c
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeCursorInitialState.java
@@ -0,0 +1,47 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+/**
+ * Immutable snapshot handed to an LSM range cursor when it is opened: the
+ * number of component cursors to allocate, the leaf-frame factory, the key
+ * comparator, and a back-reference to the owning tree.
+ */
+public class LSMTreeCursorInitialState implements ICursorInitialState {
+
+    private final int numberOfTrees;
+    private final ITreeIndexFrameFactory leafFrameFactory;
+    private final MultiComparator cmp;
+    private final LSMTree lsm;
+
+    public LSMTreeCursorInitialState(int numberOfTrees, ITreeIndexFrameFactory leafFrameFactory, MultiComparator cmp,
+            LSMTree lsm) {
+        this.numberOfTrees = numberOfTrees;
+        this.leafFrameFactory = leafFrameFactory;
+        this.cmp = cmp;
+        this.lsm = lsm;
+    }
+
+    public int getNumberOfTrees() {
+        return numberOfTrees;
+    }
+
+    public ITreeIndexFrameFactory getLeafFrameFactory() {
+        return leafFrameFactory;
+    }
+
+    public MultiComparator getCmp() {
+        return cmp;
+    }
+
+    public LSMTree getLsm() {
+        return lsm;
+    }
+
+    /** LSM cursors do not pin a page up front. */
+    @Override
+    public ICachedPage getPage() {
+        return null;
+    }
+
+    /** No-op: page state is managed per underlying BTree cursor. */
+    @Override
+    public void setPage(ICachedPage page) {
+    }
+
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeDeleteTest.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeDeleteTest.java
new file mode 100644
index 0000000..3375918
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeDeleteTest.java
@@ -0,0 +1,1095 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import static org.junit.Assert.fail;
+
+import java.io.ByteArrayInputStream;
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class LSMTreeDeleteTest extends AbstractLSMTreeTest {
+
+ // Buffer-cache configuration shared by every test: small pages so the
+ // in-memory component fills quickly (exercised directly by Test4).
+ private static final int PAGE_SIZE = 256;
+ private static final int NUM_PAGES = 100;
+ private static final int MAX_OPEN_FILES = 100;
+ // Hyracks frame size used for tuple staging; deliberately tiny for tests.
+ // NOTE(review): 'fileName' used below presumably comes from
+ // AbstractLSMTreeTest -- confirm it is a fresh scratch file per test.
+ private static final int HYRACKS_FRAME_SIZE = 128;
+ private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+ // BASIC DELETE TEST
+ // Create a fixed-length LSM tree and perform 50 deletes, i.e. insert 50
+ // delete (antimatter) nodes into the in-memory tree.
+ // Deletes 50 keys from an empty tree, then scans and verifies that every
+ // surviving tuple's payload matches the bookkeeping in resultArray.
+ @Test
+ public void Test1() throws Exception {
+ // on-disk component: buffer cache over the test scratch file
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // in-memory component
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ // declare fields: two fixed-length int fields (field 0 = key, field 1 = payload)
+ int fieldCount = 2;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+ // declare keys: only field 0 participates in comparisons
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(cmps);
+
+ // two tuple writers; the boolean presumably flags delete/antimatter
+ // tuples -- TODO confirm in LSMTypeAwareTupleWriterFactory
+ LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+ LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+ ITreeIndexFrameFactory insertLeafFrameFactory = new BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
+ ITreeIndexFrameFactory deleteLeafFrameFactory = new BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ IFreePageManager memFreePageManager = new InMemoryFreePageManager(100, metaFrameFactory);
+
+ FreePageManagerFactory freePageManagerFactory = new FreePageManagerFactory(bufferCache, metaFrameFactory);
+ BTreeFactory bTreeFactory = new BTreeFactory(bufferCache, freePageManagerFactory, cmp, fieldCount,
+ interiorFrameFactory, insertLeafFrameFactory);
+
+ LSMTree lsmtree = new LSMTree(memBufferCache, bufferCache, fieldCount, cmp, memFreePageManager,
+ interiorFrameFactory, insertLeafFrameFactory, deleteLeafFrameFactory, bTreeFactory,
+ (IFileMapManager) fmp);
+
+ lsmtree.create(fileId);
+ lsmtree.open(fileId);
+
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ ITreeIndexAccessor lsmTreeAccessor = lsmtree.createAccessor();
+
+ // resultArray[i] = { key, payload, deletedFlag }; every row starts deleted
+ int resultSize = 50;
+ int[][] resultArray = new int[resultSize][3];
+
+ for (int i = 0; i < resultSize; i++) {
+ resultArray[i][0] = i;
+ resultArray[i][1] = i + 1;
+ resultArray[i][2] = 1;
+ }
+
+ // delete each key once
+ for (int i = 0; i < resultSize; i++) {
+
+ int f0 = resultArray[i][0];
+ int f1 = resultArray[i][1];
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.delete(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test01:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // full-range scan; only rows with deletedFlag == 0 may appear
+ ITreeIndexCursor scanCursor = new LSMTreeRangeSearchCursor();
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ lsmTreeAccessor.search(scanCursor, nullPred);
+
+ // NOTE(review): exceptions in this verification block are only printed,
+ // so a failing scan cannot fail the test -- consider rethrowing.
+ try {
+ int scanTupleIndex = 0;
+ int arrayIndex = 0;
+ Object o = null;
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ int numPrintFields = Math.min(frameTuple.getFieldCount(), recDescSers.length);
+
+ // after this loop, o holds the last deserialized field (the payload)
+ for (int i = 0; i < numPrintFields; i++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(i),
+ frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ DataInput dataIn = new DataInputStream(inStream);
+ o = recDescSers[i].deserialize(dataIn);
+
+ }
+ // skip bookkeeping rows that are marked deleted
+ while (resultArray[arrayIndex][2] != 0) {
+ arrayIndex++;
+ }
+ if (Integer.parseInt(o.toString()) != resultArray[arrayIndex][1]) {
+ fail("Input value and Output value doesn't match on the " + scanTupleIndex + " tuple\n");
+ }
+ scanTupleIndex++;
+ arrayIndex++;
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
+
+ lsmtree.close();
+ bufferCache.closeFile(fileId);
+ memBufferCache.close();
+ }
+
+ // INSERT-DELETE TEST
+ // Create a fixed-length LSM tree. First do 100 insertions, then do 50
+ // deletions that reuse 50 of the inserted keys.
+ // Inserts keys 0..99, then deletes keys 50..99 (with a bumped payload),
+ // then scans and verifies the 50 surviving tuples against resultArray.
+ @Test
+ public void Test2() throws Exception {
+ // on-disk component
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // in-memory component
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ // declare fields (field 0 = key, field 1 = payload)
+ int fieldCount = 2;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(cmps);
+
+ LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+ LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+ ITreeIndexFrameFactory insertLeafFrameFactory = new BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
+ ITreeIndexFrameFactory deleteLeafFrameFactory = new BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ IFreePageManager memFreePageManager = new InMemoryFreePageManager(100, metaFrameFactory);
+
+ FreePageManagerFactory freePageManagerFactory = new FreePageManagerFactory(bufferCache, metaFrameFactory);
+ BTreeFactory bTreeFactory = new BTreeFactory(bufferCache, freePageManagerFactory, cmp, fieldCount,
+ interiorFrameFactory, insertLeafFrameFactory);
+
+ LSMTree lsmtree = new LSMTree(memBufferCache, bufferCache, fieldCount, cmp, memFreePageManager,
+ interiorFrameFactory, insertLeafFrameFactory, deleteLeafFrameFactory, bTreeFactory,
+ (IFileMapManager) fmp);
+
+ lsmtree.create(fileId);
+ lsmtree.open(fileId);
+
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ // resultArray[i] = { key, payload, deletedFlag }; rows start live
+ int resultSize = 100;
+ int deleteStartPosition = 50;
+ int[][] resultArray = new int[resultSize][3];
+
+ for (int i = 0; i < resultSize; i++) {
+ resultArray[i][0] = i;
+ resultArray[i][1] = i + 1;
+ resultArray[i][2] = 0;
+ }
+
+ // insert all 100 keys
+ ITreeIndexAccessor lsmTreeAccessor = lsmtree.createAccessor();
+ for (int i = 0; i < resultSize; i++) {
+
+ int f0 = resultArray[i][0];
+ int f1 = resultArray[i][1];
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.insert(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test02:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // delete the upper half; payload is bumped and the row marked deleted
+ for (int i = deleteStartPosition; i < resultSize; i++) {
+
+ int f0 = resultArray[i][0];
+ int f1 = ++resultArray[i][1];
+ resultArray[i][2] = 1;
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.delete(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test02:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // full-range scan; only rows with deletedFlag == 0 may appear
+ ITreeIndexCursor scanCursor = new LSMTreeRangeSearchCursor();
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ lsmTreeAccessor.search(scanCursor, nullPred);
+
+ // NOTE(review): exceptions here are only printed, so a failing scan
+ // cannot fail the test -- consider rethrowing.
+ try {
+ int scanTupleIndex = 0;
+ int arrayIndex = 0;
+ Object o = null;
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ int numPrintFields = Math.min(frameTuple.getFieldCount(), recDescSers.length);
+
+ // after this loop, o holds the last deserialized field (the payload)
+ for (int i = 0; i < numPrintFields; i++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(i),
+ frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ DataInput dataIn = new DataInputStream(inStream);
+ o = recDescSers[i].deserialize(dataIn);
+
+ }
+ while (resultArray[arrayIndex][2] != 0) {
+ arrayIndex++;
+ }
+ if (Integer.parseInt(o.toString()) != resultArray[arrayIndex][1]) {
+ fail("Input value and Output value doesn't match on the " + scanTupleIndex + " tuple\n");
+ }
+
+ scanTupleIndex++;
+ arrayIndex++;
+
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
+
+ lsmtree.close();
+ bufferCache.closeFile(fileId);
+ memBufferCache.close();
+ }
+
+ // DELETE->INSERT TEST
+ // Create a fixed-length LSM tree. First do 100 deletions, then do 50
+ // insertions that reuse 50 of the deleted keys.
+ // Deletes keys 0..99, then re-inserts keys 50..99 (with a bumped payload);
+ // the scan must see exactly the re-inserted half.
+ @Test
+ public void Test3() throws Exception {
+ // NOTE(review): leftover debug output; consider removing or using a logger.
+ System.out.println("TEST3");
+ // on-disk component
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // in-memory component
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ // declare fields (field 0 = key, field 1 = payload)
+ int fieldCount = 2;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(cmps);
+
+ LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+ LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+ ITreeIndexFrameFactory insertLeafFrameFactory = new BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
+ ITreeIndexFrameFactory deleteLeafFrameFactory = new BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
+ // change
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ IFreePageManager memFreePageManager = new InMemoryFreePageManager(100, metaFrameFactory);
+
+ FreePageManagerFactory freePageManagerFactory = new FreePageManagerFactory(bufferCache, metaFrameFactory);
+ BTreeFactory bTreeFactory = new BTreeFactory(bufferCache, freePageManagerFactory, cmp, fieldCount,
+ interiorFrameFactory, insertLeafFrameFactory);
+
+ LSMTree lsmtree = new LSMTree(memBufferCache, bufferCache, fieldCount, cmp, memFreePageManager,
+ interiorFrameFactory, insertLeafFrameFactory, deleteLeafFrameFactory, bTreeFactory,
+ (IFileMapManager) fmp);
+
+ lsmtree.create(fileId);
+ lsmtree.open(fileId);
+
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ // resultArray[i] = { key, payload, deletedFlag }; rows start deleted
+ int resultSize = 100;
+ int insertStartPosition = 50;
+ int[][] resultArray = new int[resultSize][3];
+
+ for (int i = 0; i < resultSize; i++) {
+ resultArray[i][0] = i;
+ resultArray[i][1] = i + 1;
+ resultArray[i][2] = 1;
+ }
+
+ // delete all 100 keys
+ ITreeIndexAccessor lsmTreeAccessor = lsmtree.createAccessor();
+ for (int i = 0; i < resultSize; i++) {
+
+ int f0 = resultArray[i][0];
+ int f1 = resultArray[i][1];
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.delete(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test03:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // re-insert the upper half; payload bumped, row marked live again
+ for (int i = insertStartPosition; i < resultSize; i++) {
+
+ int f0 = resultArray[i][0];
+ int f1 = ++resultArray[i][1];
+ resultArray[i][2] = 0;
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.insert(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test03:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // full-range scan; only rows with deletedFlag == 0 may appear
+ ITreeIndexCursor scanCursor = new LSMTreeRangeSearchCursor();
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ lsmTreeAccessor.search(scanCursor, nullPred);
+
+ // NOTE(review): exceptions here are only printed, so a failing scan
+ // cannot fail the test -- consider rethrowing.
+ try {
+ int scanTupleIndex = 0;
+ int arrayIndex = 0;
+ Object o = null;
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ int numPrintFields = Math.min(frameTuple.getFieldCount(), recDescSers.length);
+
+ // after this loop, o holds the last deserialized field (the payload)
+ for (int i = 0; i < numPrintFields; i++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(i),
+ frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ DataInput dataIn = new DataInputStream(inStream);
+ o = recDescSers[i].deserialize(dataIn);
+ }
+ while (resultArray[arrayIndex][2] != 0) {
+ arrayIndex++;
+ }
+ if (Integer.parseInt(o.toString()) != resultArray[arrayIndex][1]) {
+ fail("Input value and Output value doesn't match on the " + scanTupleIndex + " tuple\n");
+ }
+
+ scanTupleIndex++;
+ arrayIndex++;
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
+
+ lsmtree.close();
+ bufferCache.closeFile(fileId);
+ memBufferCache.close();
+ }
+
+ // DELETION and PageAllocationException TEST
+ // Create a fixed-length LSM tree and perform 811 deletions; the in-memory
+ // pages run out around the 810th deletion. If any exception escapes, the
+ // test fails.
+ // Performs 811 deletes to exhaust the in-memory component and exercise the
+ // flush path; any exception is turned into a test failure.
+ @Test
+ public void Test4() throws Exception {
+ // on-disk component
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // in-memory component
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ // declare fields (field 0 = key, field 1 = payload)
+ int fieldCount = 2;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(cmps);
+
+ LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+ LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+ ITreeIndexFrameFactory insertLeafFrameFactory = new BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
+ ITreeIndexFrameFactory deleteLeafFrameFactory = new BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ IFreePageManager memFreePageManager = new InMemoryFreePageManager(100, metaFrameFactory);
+
+ FreePageManagerFactory freePageManagerFactory = new FreePageManagerFactory(bufferCache, metaFrameFactory);
+ BTreeFactory bTreeFactory = new BTreeFactory(bufferCache, freePageManagerFactory, cmp, fieldCount,
+ interiorFrameFactory, insertLeafFrameFactory);
+
+ // For the Flush Mechanism
+ LSMTree lsmtree = new LSMTree(memBufferCache, bufferCache, fieldCount, cmp, memFreePageManager,
+ interiorFrameFactory, insertLeafFrameFactory, deleteLeafFrameFactory, bTreeFactory,
+ (IFileMapManager) fmp);
+
+ lsmtree.create(fileId);
+ lsmtree.open(fileId);
+
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ // resultArray[i] = { key, payload }; 811 rows to overflow the 100
+ // in-memory pages and force a flush
+ int resultSize = 811;
+ int[][] resultArray = new int[resultSize][2];
+
+ for (int i = 0; i < resultSize; i++) {
+ resultArray[i][0] = i;
+ resultArray[i][1] = i + 1;
+ }
+
+ // delete; any exception (including during the triggered flush) fails the test
+ ITreeIndexAccessor lsmTreeAccessor = lsmtree.createAccessor();
+ for (int i = 0; i < resultSize; i++) {
+
+ int f0 = resultArray[i][0];
+ int f1 = resultArray[i][1];
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.delete(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test04:" + e);
+ e.printStackTrace();
+ fail("test04: Catch TreeIndexException" + e);
+ } catch (Exception e) {
+ e.printStackTrace();
+ fail("test04: Catch Other Exceptions" + e);
+ }
+ }
+ // NOTE(review): unlike the other tests, this one never closes lsmtree,
+ // bufferCache or memBufferCache -- resources leak across tests; confirm
+ // whether that is intended.
+ }
+
+ // DELETE -> DELETE
+ // Create a fixed-length LSM tree. First do 100 deletions, then do 50 more
+ // deletions that reuse 50 of the same keys.
+ // Deletes keys 0..99, then deletes keys 50..99 again with a bumped
+ // payload. Every bookkeeping row stays marked deleted, so the
+ // verification scan must see no live tuples.
+ // FIX(review): the second loop previously called insert() even though
+ // this test models DELETE -> DELETE and keeps resultArray[i][2] == 1
+ // (deleted); it now calls delete(). The misnamed local
+ // 'insertStartPosition' was renamed to 'deleteStartPosition'.
+ @Test
+ public void Test5() throws Exception {
+ // on-disk component
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // in-memory component
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ // declare fields (field 0 = key, field 1 = payload)
+ int fieldCount = 2;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(cmps);
+
+ LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+ LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+ ITreeIndexFrameFactory insertLeafFrameFactory = new BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
+ ITreeIndexFrameFactory deleteLeafFrameFactory = new BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ IFreePageManager memFreePageManager = new InMemoryFreePageManager(100, metaFrameFactory);
+
+ FreePageManagerFactory freePageManagerFactory = new FreePageManagerFactory(bufferCache, metaFrameFactory);
+ BTreeFactory bTreeFactory = new BTreeFactory(bufferCache, freePageManagerFactory, cmp, fieldCount,
+ interiorFrameFactory, insertLeafFrameFactory);
+
+ LSMTree lsmtree = new LSMTree(memBufferCache, bufferCache, fieldCount, cmp, memFreePageManager,
+ interiorFrameFactory, insertLeafFrameFactory, deleteLeafFrameFactory, bTreeFactory,
+ (IFileMapManager) fmp);
+
+ lsmtree.create(fileId);
+ lsmtree.open(fileId);
+
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ // resultArray[i] = { key, payload, deletedFlag }; rows start deleted
+ int resultSize = 100;
+ int deleteStartPosition = 50;
+ int[][] resultArray = new int[resultSize][3];
+
+ for (int i = 0; i < resultSize; i++) {
+ resultArray[i][0] = i;
+ resultArray[i][1] = i + 1;
+ resultArray[i][2] = 1;
+ }
+
+ // First deletion part: delete all 100 keys
+ ITreeIndexAccessor lsmTreeAccessor = lsmtree.createAccessor();
+ for (int i = 0; i < resultSize; i++) {
+
+ int f0 = resultArray[i][0];
+ int f1 = resultArray[i][1];
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.delete(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test05:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // Second deletion part: delete the upper half again with bumped payload
+ for (int i = deleteStartPosition; i < resultSize; i++) {
+
+ int f0 = resultArray[i][0];
+ int f1 = ++resultArray[i][1];
+ resultArray[i][2] = 1;
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.delete(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test05:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // full-range scan; no row has deletedFlag == 0, so nothing should match
+ ITreeIndexCursor scanCursor = new LSMTreeRangeSearchCursor();
+ RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+ lsmTreeAccessor.search(scanCursor, nullPred);
+
+ // NOTE(review): exceptions here are only printed, so a failing scan
+ // cannot fail the test -- consider rethrowing.
+ try {
+ int scanTupleIndex = 0;
+ int arrayIndex = 0;
+ Object o = null;
+ while (scanCursor.hasNext()) {
+ scanCursor.next();
+ ITupleReference frameTuple = scanCursor.getTuple();
+ int numPrintFields = Math.min(frameTuple.getFieldCount(), recDescSers.length);
+
+ // after this loop, o holds the last deserialized field (the payload)
+ for (int i = 0; i < numPrintFields; i++) {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(i),
+ frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+ DataInput dataIn = new DataInputStream(inStream);
+ o = recDescSers[i].deserialize(dataIn);
+
+ }
+ while (resultArray[arrayIndex][2] != 0) {
+ arrayIndex++;
+ }
+ if (Integer.parseInt(o.toString()) != resultArray[arrayIndex][1]) {
+ fail("Input value and Output value doesn't match on the " + scanTupleIndex + " tuple\n");
+ }
+
+ scanTupleIndex++;
+ arrayIndex++;
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ scanCursor.close();
+ }
+
+ lsmtree.close();
+ bufferCache.closeFile(fileId);
+ memBufferCache.close();
+ }
+
+ // INSERT -> DELETE -> INSERT
+ // Create a fixed-length LSM tree and interleave insertions and deletions.
+ // The final expected state is:
+ // | 0~9 | 10~19 | 20~39 | 40~59 | 60~79 | 80~99 |
+ // | f1=10 | f1=9 | f1=8 | f1=7 | f1=6 | f1=5 |
+ // | Insert| Insert| Delete| Delete| Insert| Insert|
+    @Test
+    public void Test6() throws Exception {
+
+        // Exercises the LSM in-memory component with an insert -> delete ->
+        // re-insert sequence, then verifies the surviving keys via a full scan.
+        // resultArray columns: [0] = key, [1] = expected payload (f1),
+        // [2] = tombstone flag (1 = key was deleted and must not be returned).
+
+        // in disk
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(fileName));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // in mem
+        InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+        IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+        // declare fields: one int key, one int payload
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        // declare keys
+        int keyFieldCount = 1;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+        MultiComparator cmp = new MultiComparator(cmps);
+
+        // Two tuple-writer factories: the boolean flag distinguishes the delete
+        // (tombstone) variant from the insert variant — presumably antimatter
+        // tuples; TODO confirm against LSMTypeAwareTupleWriterFactory.
+        LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+        LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+        ITreeIndexFrameFactory insertLeafFrameFactory = new BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
+        ITreeIndexFrameFactory deleteLeafFrameFactory = new BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        IFreePageManager memFreePageManager = new InMemoryFreePageManager(100, metaFrameFactory);
+
+        FreePageManagerFactory freePageManagerFactory = new FreePageManagerFactory(bufferCache, metaFrameFactory);
+        BTreeFactory bTreeFactory = new BTreeFactory(bufferCache, freePageManagerFactory, cmp, fieldCount,
+                interiorFrameFactory, insertLeafFrameFactory);
+
+        LSMTree lsmtree = new LSMTree(memBufferCache, bufferCache, fieldCount, cmp, memFreePageManager,
+                interiorFrameFactory, insertLeafFrameFactory, deleteLeafFrameFactory, bTreeFactory,
+                (IFileMapManager) fmp);
+
+        lsmtree.create(fileId);
+        lsmtree.open(fileId);
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        ITreeIndexAccessor lsmTreeAccessor = lsmtree.createAccessor();
+
+        int resultSize = 180;
+        int[][] resultArray = new int[resultSize][3];
+
+        // insert: keys wrap via i % 100, so i >= 100 re-inserts keys 0..79
+        // with payload 6, leaving 80..99 at payload 5.
+        for (int i = 0; i < 180; i++) {
+            int f0 = i % 100;
+            int f1;
+            if (i >= 100) {
+                f1 = 6;
+            } else {
+                f1 = 5;
+            }
+
+            // record the latest expected payload and clear the tombstone flag
+            resultArray[f0][0] = f0;
+            resultArray[f0][1] = f1;
+            resultArray[f0][2] = 0;
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            // NOTE(review): tuple is reset here but the insert below uses the
+            // ArrayTupleReference t; the frame/appender/tuple plumbing looks vestigial.
+            tuple.reset(accessor, 0);
+
+            ArrayTupleReference t = new ArrayTupleReference();
+            t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+            try {
+                lsmTreeAccessor.insert(t);
+            } catch (TreeIndexException e) {
+                System.out.println("test06:" + e);
+                e.printStackTrace();
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        // delete: keys wrap via i % 60, so i >= 60 re-deletes keys 0..39
+        // (payload 8), leaving 40..59 deleted with payload 7; sets tombstone flag.
+        for (int i = 0; i < 100; i++) {
+            int f0 = i % 60;
+            int f1;
+            if (i >= 60) {
+                f1 = 8;
+            } else {
+                f1 = 7;
+            }
+
+            resultArray[f0][0] = f0;
+            resultArray[f0][1] = f1;
+            resultArray[f0][2] = 1;
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            ArrayTupleReference t = new ArrayTupleReference();
+            t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+            try {
+                lsmTreeAccessor.delete(t);
+            } catch (TreeIndexException e) {
+                System.out.println("test06:" + e);
+                e.printStackTrace();
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+
+        }
+
+        // reinsert: keys wrap via i % 20, so i >= 20 re-inserts keys 0..9
+        // (payload 10), leaving 10..19 at payload 9; clears tombstone flag.
+        for (int i = 0; i < 30; i++) {
+            int f0 = i % 20;
+            int f1;
+            if (i >= 20) {
+                f1 = 10;
+            } else {
+                f1 = 9;
+            }
+
+            resultArray[f0][0] = f0;
+            resultArray[f0][1] = f1;
+            resultArray[f0][2] = 0;
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            ArrayTupleReference t = new ArrayTupleReference();
+            t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+            try {
+                lsmTreeAccessor.insert(t);
+            } catch (TreeIndexException e) {
+                System.out.println("test06:" + e);
+                e.printStackTrace();
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        // scan: full-range search; compare each returned tuple against the
+        // next non-deleted entry of resultArray.
+        ITreeIndexCursor scanCursor = new LSMTreeRangeSearchCursor();
+        RangePredicate nullPred = new RangePredicate(true, null, null, true, true, null, null);
+        lsmTreeAccessor.search(scanCursor, nullPred);
+
+        try {
+            int scanTupleIndex = 0;
+            int arrayIndex = 0;
+            Object o = null;
+            while (scanCursor.hasNext()) {
+                scanCursor.next();
+                ITupleReference frameTuple = scanCursor.getTuple();
+                int numPrintFields = Math.min(frameTuple.getFieldCount(), recDescSers.length);
+
+                // Deserializes every field, but o keeps only the LAST field's
+                // value (the payload f1) — that is what is verified below.
+                for (int i = 0; i < numPrintFields; i++) {
+                    ByteArrayInputStream inStream = new ByteArrayInputStream(frameTuple.getFieldData(i),
+                            frameTuple.getFieldStart(i), frameTuple.getFieldLength(i));
+                    DataInput dataIn = new DataInputStream(inStream);
+                    o = recDescSers[i].deserialize(dataIn);
+                }
+                // skip expected entries that were deleted (tombstone flag set)
+                while (resultArray[arrayIndex][2] != 0) {
+                    arrayIndex++;
+                }
+                if (Integer.parseInt(o.toString()) != resultArray[arrayIndex][1]) {
+                    fail("Input value and Output value doesn't match on the " + scanTupleIndex + " tuple\n");
+                }
+
+                scanTupleIndex++;
+                arrayIndex++;
+            }
+
+        } catch (Exception e) {
+            e.printStackTrace();
+        } finally {
+            scanCursor.close();
+        }
+
+        lsmtree.close();
+        bufferCache.closeFile(fileId);
+        memBufferCache.close();
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeFlushTest.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeFlushTest.java
new file mode 100644
index 0000000..c0f67b1
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeFlushTest.java
@@ -0,0 +1,748 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class LSMTreeFlushTest extends AbstractLSMTreeTest {
+ private static final int PAGE_SIZE = 256;
+ private static final int NUM_PAGES = 100;
+ private static final int MAX_OPEN_FILES = 10000;
+ private static final int HYRACKS_FRAME_SIZE = 128;
+ private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+ // BASIC TEST
+ // @Test
+ // public void Test1() throws Exception {
+ // System.out.printf("TEST1 START\n");
+ // //in disk
+ // TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES,
+ // MAX_OPEN_FILES);
+ // IBufferCache bufferCache =
+ // TestStorageManagerComponentHolder.getBufferCache(ctx);
+ // IFileMapProvider fmp =
+ // TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ // FileReference file = new FileReference(new File(fileName));
+ // bufferCache.createFile(file);
+ // int fileId = fmp.lookupFileId(file);
+ // bufferCache.openFile(fileId);
+ //
+ // //in memory
+ // InMemoryBufferCacheFactory InMemBufferCacheFactory = new
+ // InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ // IBufferCache memBufferCache =
+ // InMemBufferCacheFactory.createInMemoryBufferCache();
+ //
+ // // declare fields
+ // int fieldCount = 2;
+ // ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ // typeTraits[0] = new TypeTrait(4);
+ // typeTraits[1] = new TypeTrait(4);
+ //
+ // // declare keys
+ // int keyFieldCount = 1;
+ // IBinaryComparatorFactory[] cmpFactories = new
+ // IBinaryComparatorFactory[keyFieldCount];
+ // cmpFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+ //
+ // MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
+ //
+ // LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new
+ // LSMTypeAwareTupleWriterFactory(typeTraits, false);
+ // LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new
+ // LSMTypeAwareTupleWriterFactory(typeTraits, true);
+ //
+ // ITreeIndexFrameFactory insertLeafFrameFactory = new
+ // BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
+ // ITreeIndexFrameFactory deleteLeafFrameFactory = new
+ // BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
+ // ITreeIndexFrameFactory interiorFrameFactory = new
+ // BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+ // ITreeIndexMetaDataFrameFactory metaFrameFactory = new
+ // LIFOMetaDataFrameFactory();
+ //
+ // IBTreeLeafFrame insertLeafFrame = (IBTreeLeafFrame)
+ // insertLeafFrameFactory.createFrame();
+ //
+ // IFreePageManager freePageManager = new
+ // LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+ // IFreePageManager memFreePageManager = new InMemoryFreePageManager(100,
+ // metaFrameFactory);
+ //
+ // // For the Flush Mechanism
+ // LSMEntireTupleWriterFactory flushTupleWriterFactory = new
+ // LSMEntireTupleWriterFactory(typeTraits);
+ // ITreeIndexFrameFactory flushLeafFrameFactory = new
+ // BTreeNSMLeafFrameFactory(flushTupleWriterFactory);
+ // FreePageManagerFactory freePageManagerFactory = new
+ // FreePageManagerFactory(bufferCache, metaFrameFactory);
+ // BTreeFactory bTreeFactory = new BTreeFactory(bufferCache,
+ // freePageManagerFactory, cmp, fieldCount, interiorFrameFactory,
+ // flushLeafFrameFactory);
+ //
+ //
+ //
+ // // LSMTree lsmtree = new LSMTree(3, 100, 2, memBufferCache, bufferCache,
+ // fieldCount, cmp, memFreePageManager,
+ // // freePageManager, interiorFrameFactory, insertLeafFrameFactory,
+ // deleteLeafFrameFactory, bTreeFactory, flushLeafFrameFactory,
+ // (IFileMapManager)fmp);
+ // //
+ // LSMTree lsmtree = LSMTreeUtils.createLSMTree(memBufferCache, bufferCache,
+ // fileId, typeTraits, cmp.getComparators(), BTreeLeafFrameType.REGULAR_NSM,
+ // (IFileMapManager)fmp);
+ // lsmtree.create(fileId);
+ // lsmtree.open(fileId);
+ //
+ // ByteBuffer frame = ctx.allocateFrame();
+ // FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ //
+ // ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ // DataOutput dos = tb.getDataOutput();
+ //
+ // ISerializerDeserializer[] recDescSers = {
+ // IntegerSerializerDeserializer.INSTANCE,
+ // IntegerSerializerDeserializer.INSTANCE };
+ // RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ //
+ // IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(),
+ // recDesc);
+ // accessor.reset(frame);
+ //
+ // FrameTupleReference tuple = new FrameTupleReference();
+ //
+ // int resultSize = 100;
+ // int[][] resultArray = new int[resultSize][2];
+ //
+ //
+ // //insert 100 tuples
+ // for (int i = 0; i < resultSize; i++){
+ // resultArray[i][0] = i;
+ // resultArray[i][1] = 1;
+ // }
+ //
+ //
+ // LSMTreeOpContext insertOpCtx = lsmtree.createOpContext(IndexOp.INSERT);
+ // for (int i = 0; i < resultSize; i++) {
+ //
+ // int f0 = resultArray[i][0];
+ // int f1 = resultArray[i][1];
+ //
+ // tb.reset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ // tb.addFieldEndOffset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ // tb.addFieldEndOffset();
+ //
+ // appender.reset(frame, true);
+ // appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
+ // tb.getSize());
+ //
+ // tuple.reset(accessor, 0);
+ //
+ // ArrayTupleReference t = new ArrayTupleReference();
+ // t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+ //
+ // try {
+ // lsmtree.insert(t, insertOpCtx);
+ // } catch (TreeIndexException e) {
+ // System.out.println("test01:" + e);
+ // e.printStackTrace();
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // }
+ // }
+ // // Delete the first 50 keys in the in-memory tree
+ // insertOpCtx = lsmtree.createOpContext(IndexOp.DELETE);
+ // for (int i = 0; i < 50; i++){
+ // resultArray[i][0] = i;
+ // resultArray[i][1] = 1;
+ // }
+ //
+ // for (int i = 0; i < 50; i++) {
+ //
+ // int f0 = resultArray[i][0];
+ // int f1 = resultArray[i][1];
+ //
+ // tb.reset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ // tb.addFieldEndOffset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ // tb.addFieldEndOffset();
+ //
+ // appender.reset(frame, true);
+ // appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
+ // tb.getSize());
+ //
+ // tuple.reset(accessor, 0);
+ //
+ // ArrayTupleReference t = new ArrayTupleReference();
+ // t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+ //
+ // try {
+ // lsmtree.delete(t, insertOpCtx);
+ // } catch (TreeIndexException e) {
+ // System.out.println("test01:" + e);
+ // e.printStackTrace();
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // }
+ // }
+ //
+ //
+ // //Flush the tree into the first in Disk tree
+ // lsmtree.flushInMemoryBtree();
+ //
+ // //insert 50 delete nodes
+ // insertOpCtx = lsmtree.createOpContext(IndexOp.DELETE);
+ // for (int i = 0; i < 50; i++){
+ // resultArray[i][0] = i;
+ // resultArray[i][1] = 2;
+ // }
+ //
+ // for (int i = 0; i < 50; i++) {
+ //
+ // int f0 = resultArray[i][0];
+ // int f1 = resultArray[i][1];
+ //
+ // tb.reset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ // tb.addFieldEndOffset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ // tb.addFieldEndOffset();
+ //
+ // appender.reset(frame, true);
+ // appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
+ // tb.getSize());
+ //
+ // tuple.reset(accessor, 0);
+ //
+ // ArrayTupleReference t = new ArrayTupleReference();
+ // t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+ //
+ // try {
+ // lsmtree.delete(t, insertOpCtx);
+ // } catch (TreeIndexException e) {
+ // System.out.println("test01:" + e);
+ // e.printStackTrace();
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // }
+ // }
+ //
+ // // insert 25 nodes
+ // insertOpCtx = lsmtree.createOpContext(IndexOp.INSERT);
+ // for (int i = 0; i < resultSize; i++){
+ // resultArray[i][0] = i;
+ // resultArray[i][1] = 2;
+ // }
+ // for (int i = 0; i < 25; i++) {
+ //
+ // int f0 = resultArray[i][0];
+ // int f1 = resultArray[i][1];
+ //
+ // tb.reset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ // tb.addFieldEndOffset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ // tb.addFieldEndOffset();
+ //
+ // appender.reset(frame, true);
+ // appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
+ // tb.getSize());
+ //
+ // tuple.reset(accessor, 0);
+ //
+ // ArrayTupleReference t = new ArrayTupleReference();
+ // t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+ //
+ // try {
+ // lsmtree.insert(t, insertOpCtx);
+ // } catch (TreeIndexException e) {
+ // System.out.println("test01:" + e);
+ // e.printStackTrace();
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // }
+ // }
+ //
+ // //Flush the tree into the fist in Disk tree, which have fieldId as "1"
+ // lsmtree.flushInMemoryBtree();
+ //
+ // //Print out the first in Disk Btree
+ // System.out.println("LSMTreeFlushTest: start print the first tree");
+ // lsmtree.scanDiskTree(0);
+ // //Print out the second in Disk Btree
+ // System.out.println("LSMTreeFlushTest: start print the second tree");
+ // lsmtree.scanDiskTree(1);
+ //
+ //
+ // lsmtree.close();
+ // bufferCache.closeFile(fileId);
+ // memBufferCache.close();
+ //
+ // System.out.printf("End of TEST1()\n");
+ //
+ // }
+ // TEST auto Flush
+    @Test
+    public void Test2() throws Exception {
+        System.out.printf("TEST2 START\n");
+        // Inserts 820 tuples — presumably enough to overflow the in-memory
+        // component several times and exercise the automatic flush path;
+        // TODO confirm against the LSMTree flush threshold. The test only
+        // checks that the inserts and the disk scan complete without failure.
+
+        // in disk
+        TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+        IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(fileName));
+        bufferCache.createFile(file);
+        int fileId = fmp.lookupFileId(file);
+        bufferCache.openFile(fileId);
+
+        // in memory
+        InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+        IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+        // declare fields: one int key, one int payload
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        // declare keys
+        int keyFieldCount = 1;
+        IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+        cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+        MultiComparator cmp = new MultiComparator(cmps);
+
+        LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+        LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+        ITreeIndexFrameFactory insertLeafFrameFactory = new BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
+        ITreeIndexFrameFactory deleteLeafFrameFactory = new BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+        IFreePageManager memFreePageManager = new InMemoryFreePageManager(NUM_PAGES, metaFrameFactory);
+
+        // For the Flush Mechanism: the disk BTrees are built with the
+        // entire-tuple writer so flushed tuples carry their full payload.
+        LSMEntireTupleWriterFactory flushTupleWriterFactory = new LSMEntireTupleWriterFactory(typeTraits);
+        ITreeIndexFrameFactory flushLeafFrameFactory = new BTreeNSMLeafFrameFactory(flushTupleWriterFactory);
+        FreePageManagerFactory freePageManagerFactory = new FreePageManagerFactory(bufferCache, metaFrameFactory);
+        BTreeFactory bTreeFactory = new BTreeFactory(bufferCache, freePageManagerFactory, cmp, fieldCount,
+                interiorFrameFactory, flushLeafFrameFactory);
+
+        LSMTree lsmtree = new LSMTree(memBufferCache, bufferCache, fieldCount, cmp, memFreePageManager,
+                interiorFrameFactory, insertLeafFrameFactory, deleteLeafFrameFactory, bTreeFactory,
+                (IFileMapManager) fmp);
+
+        lsmtree.create(fileId);
+        lsmtree.open(fileId);
+
+        ByteBuffer frame = ctx.allocateFrame();
+        FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+        ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+        DataOutput dos = tb.getDataOutput();
+
+        ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+                IntegerSerializerDeserializer.INSTANCE };
+        RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+
+        IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+        accessor.reset(frame);
+
+        FrameTupleReference tuple = new FrameTupleReference();
+
+        int resultSize = 820;
+        int[][] resultArray = new int[resultSize][2];
+
+        // insert 820 tuples with key == payload == i
+        for (int i = 0; i < resultSize; i++) {
+            resultArray[i][0] = i;
+            resultArray[i][1] = i;
+        }
+
+        ITreeIndexAccessor lsmTreeAccessor = lsmtree.createAccessor();
+        for (int i = 0; i < resultSize; i++) {
+
+            int f0 = resultArray[i][0];
+            int f1 = resultArray[i][1];
+
+            tb.reset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+            tb.addFieldEndOffset();
+            IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+            tb.addFieldEndOffset();
+
+            appender.reset(frame, true);
+            appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+            tuple.reset(accessor, 0);
+
+            ArrayTupleReference t = new ArrayTupleReference();
+            t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+            try {
+                lsmTreeAccessor.insert(t);
+            } catch (TreeIndexException e) {
+                System.out.println("test02:" + e);
+                e.printStackTrace();
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+        }
+
+        // Print out the third in Disk Btree
+        System.out.println("LSMTreeFlushTest: start print the third tree");
+        // lsmtree.scanDiskTree(2);
+        // Print out the second in Disk Btree
+        System.out.println("LSMTreeFlushTest: start print the second tree");
+        // lsmtree.scanDiskTree(1);
+        // Print out the first in Disk Btree
+        System.out.println("LSMTreeFlushTest: start print the first tree");
+        lsmtree.scanDiskTree(0);
+
+        lsmtree.close();
+        bufferCache.closeFile(fileId);
+        memBufferCache.close();
+
+        System.out.printf("End of TEST2()\n");
+
+    }
+
+ // @Test
+ // public void Test3() throws Exception {
+ // System.out.printf("TEST3 START\n");
+ // //in disk
+ // TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES,
+ // MAX_OPEN_FILES);
+ // IBufferCache bufferCache =
+ // TestStorageManagerComponentHolder.getBufferCache(ctx);
+ // IFileMapProvider fmp =
+ // TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ // FileReference file = new FileReference(new File(fileName));
+ // bufferCache.createFile(file);
+ // int fileId = fmp.lookupFileId(file);
+ // bufferCache.openFile(fileId);
+ //
+ // //in memory
+ // InMemoryBufferCacheFactory InMemBufferCacheFactory = new
+ // InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ // IBufferCache memBufferCache =
+ // InMemBufferCacheFactory.createInMemoryBufferCache();
+ //
+ // // declare fields
+ // int fieldCount = 2;
+ // ITypeTrait[] typeTraits = new ITypeTrait[fieldCount];
+ // typeTraits[0] = new TypeTrait(4);
+ // typeTraits[1] = new TypeTrait(4);
+ //
+ // // declare keys
+ // int keyFieldCount = 1;
+ // IBinaryComparatorFactory[] cmpFactories = new
+ // IBinaryComparatorFactory[keyFieldCount];
+ // cmpFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+ //
+ // MultiComparator cmp = BTreeUtils.createMultiComparator(cmpFactories);
+ //
+ // LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new
+ // LSMTypeAwareTupleWriterFactory(typeTraits, false);
+ // LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new
+ // LSMTypeAwareTupleWriterFactory(typeTraits, true);
+ //
+ // ITreeIndexFrameFactory insertLeafFrameFactory = new
+ // BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
+ // ITreeIndexFrameFactory deleteLeafFrameFactory = new
+ // BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
+ // ITreeIndexFrameFactory interiorFrameFactory = new
+ // BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+ // ITreeIndexMetaDataFrameFactory metaFrameFactory = new
+ // LIFOMetaDataFrameFactory();
+ //
+ // IBTreeLeafFrame insertLeafFrame = (IBTreeLeafFrame)
+ // insertLeafFrameFactory.createFrame();
+ //
+ // IFreePageManager freePageManager = new
+ // LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+ // IFreePageManager memFreePageManager = new InMemoryFreePageManager(30,
+ // metaFrameFactory);
+ //
+ // // For the Flush Mechanism
+ // LSMEntireTupleWriterFactory flushTupleWriterFactory = new
+ // LSMEntireTupleWriterFactory(typeTraits);
+ // ITreeIndexFrameFactory flushLeafFrameFactory = new
+ // BTreeNSMLeafFrameFactory(flushTupleWriterFactory);
+ // FreePageManagerFactory freePageManagerFactory = new
+ // FreePageManagerFactory(bufferCache, metaFrameFactory);
+ // BTreeFactory bTreeFactory = new BTreeFactory(bufferCache,
+ // freePageManagerFactory, cmp, fieldCount, interiorFrameFactory,
+ // flushLeafFrameFactory);
+ //
+ //
+ //
+ // LSMTree lsmtree = new LSMTree(memBufferCache, bufferCache, fieldCount,
+ // cmp, memFreePageManager, interiorFrameFactory, insertLeafFrameFactory,
+ // deleteLeafFrameFactory, bTreeFactory, (IFileMapManager)fmp);
+ //
+ // lsmtree.create(fileId);
+ // lsmtree.open(fileId);
+ //
+ // ByteBuffer frame = ctx.allocateFrame();
+ // FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+ //
+ // ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ // DataOutput dos = tb.getDataOutput();
+ //
+ // ISerializerDeserializer[] recDescSers = {
+ // IntegerSerializerDeserializer.INSTANCE,
+ // IntegerSerializerDeserializer.INSTANCE };
+ // RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+ //
+ // IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(),
+ // recDesc);
+ // accessor.reset(frame);
+ //
+ // FrameTupleReference tuple = new FrameTupleReference();
+ //
+ // int resultSize = 500;
+ // int[][] resultArray = new int[resultSize][2];
+ //
+ //
+ // //insert 250 tuples
+ // System.out.printf("Start for 1st Insert\n");
+ // LSMTreeOpContext insertOpCtx = lsmtree.createOpContext(IndexOp.INSERT);
+ // for (int i = 0; i < 252; i++){
+ // resultArray[i][0] = i;
+ // resultArray[i][1] = i;
+ // }
+ // for (int i = 0; i < 252; i++) {
+ //
+ // int f0 = resultArray[i][0];
+ // int f1 = resultArray[i][1];
+ //
+ // tb.reset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ // tb.addFieldEndOffset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ // tb.addFieldEndOffset();
+ //
+ // appender.reset(frame, true);
+ // appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
+ // tb.getSize());
+ //
+ // tuple.reset(accessor, 0);
+ //
+ // ArrayTupleReference t = new ArrayTupleReference();
+ // t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+ //
+ // try {
+ // lsmtree.insert(t, insertOpCtx);
+ // } catch (TreeIndexException e) {
+ // System.out.println("test03:" + e);
+ // e.printStackTrace();
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // }
+ // }
+ // System.out.printf("Start for 2nd Insert\n");
+ // //delete 126~251. Deletion of 251 cause the flush
+ // insertOpCtx.reset(IndexOp.DELETE);
+ // // LSMTreeOpContext insertOpCtx =
+ // lsmtree.createOpContext(IndexOp.DELETE);
+ // for (int i = 125; i < 253; i++){
+ // resultArray[i][0] = i;
+ // resultArray[i][1] = i;
+ // }
+ // for (int i = 126; i < 253; i++) {
+ // int f0 = resultArray[i][0];
+ // int f1 = resultArray[i][1];
+ //
+ // tb.reset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ // tb.addFieldEndOffset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ // tb.addFieldEndOffset();
+ //
+ // appender.reset(frame, true);
+ // appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
+ // tb.getSize());
+ //
+ // tuple.reset(accessor, 0);
+ //
+ // ArrayTupleReference t = new ArrayTupleReference();
+ // t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+ //
+ // try {
+ // lsmtree.delete(t, insertOpCtx);
+ // } catch (TreeIndexException e) {
+ // System.out.println("test03:" + e);
+ // e.printStackTrace();
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // }
+ // }
+ // //delete 0~250
+ // insertOpCtx = lsmtree.createOpContext(IndexOp.INSERT);
+ // for (int i = 130; i > 0; i--){
+ // resultArray[i][0] = i;
+ // resultArray[i][1] = i;
+ // }
+ // for (int i = 130; i > 0; i--) {
+ //
+ // int f0 = resultArray[i][0];
+ // int f1 = resultArray[i][1];
+ //
+ // tb.reset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ // tb.addFieldEndOffset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ // tb.addFieldEndOffset();
+ //
+ // appender.reset(frame, true);
+ // appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0,
+ // tb.getSize());
+ //
+ // tuple.reset(accessor, 0);
+ //
+ // ArrayTupleReference t = new ArrayTupleReference();
+ // t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+ //
+ // try {
+ // lsmtree.insert(t, insertOpCtx);
+ // } catch (TreeIndexException e) {
+ // System.out.println("test03:" + e);
+ // e.printStackTrace();
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // }
+ // }
+ //
+ // //
+ // //
+ // //
+ // // //Print out the second in Disk Btree
+ // // System.out.println("LSMTreeFlushTest: start print the second tree");
+ // // lsmtree.scanDiskTree(1);
+ // // //Print out the first in Disk Btree
+ // // System.out.println("LSMTreeFlushTest: start print the first tree");
+ // // lsmtree.scanDiskTree(0);
+ // //
+ // // //Print out the In-memory Tree
+ // //
+ // System.out.println("LSMTreeFlushTest: start print the In-memory tree");
+ // // lsmtree.scanInMemoryTree();
+ // // //TODO: scan whole tree
+ //
+ // LOGGER.info("RANGE SEARCH:");
+ //
+ // BTreeOpContext searchOpCtx = lsmtree.createOpContext(IndexOp.SEARCH);
+ // ITreeIndexCursor rangeCursor = new LSMTreeRangeSearchCursor();
+ //
+ // // build low and high keys
+ // ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ // DataOutput kdos = ktb.getDataOutput();
+ //
+ // ISerializerDeserializer[] keyDescSers = {
+ // IntegerSerializerDeserializer.INSTANCE };
+ // RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ // IFrameTupleAccessor keyAccessor = new
+ // FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ // keyAccessor.reset(frame);
+ //
+ // appender.reset(frame, true);
+ //
+ // // build and append low key
+ // ktb.reset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(-1, kdos);
+ // ktb.addFieldEndOffset();
+ // appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0,
+ // ktb.getSize());
+ //
+ // // build and append high key
+ // ktb.reset();
+ // IntegerSerializerDeserializer.INSTANCE.serialize(300, kdos);
+ // ktb.addFieldEndOffset();
+ // appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0,
+ // ktb.getSize());
+ //
+ // // create tuplereferences for search keys
+ // FrameTupleReference lowKey = new FrameTupleReference();
+ // lowKey.reset(keyAccessor, 0);
+ //
+ // FrameTupleReference highKey = new FrameTupleReference();
+ // highKey.reset(keyAccessor, 1);
+ //
+ // IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ // searchCmps[0] =
+ // IntegerBinaryComparatorFactory.INSTANCE.createBinaryComparator();
+ // MultiComparator searchCmp = new MultiComparator(searchCmps);
+ //
+ // RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
+ // true, true, searchCmp, searchCmp);
+ // lsmtree.search(rangeCursor, rangePred, searchOpCtx);
+ //
+ // try {
+ // while (rangeCursor.hasNext()) {
+ // rangeCursor.next();
+ // ITupleReference frameTuple = rangeCursor.getTuple();
+ // String rec = TupleUtils.printTuple(frameTuple, recDescSers);
+ // if(((LSMTypeAwareTupleReference)frameTuple).isDelete()) {
+ // System.out.println("del " + rec);
+ // }
+ // else {
+ // System.out.println("ins " + rec);
+ // }
+ // // System.out.println("------------------");
+ // }
+ // } catch (Exception e) {
+ // e.printStackTrace();
+ // } finally {
+ // rangeCursor.close();
+ // }
+ //
+ // lsmtree.close();
+ // bufferCache.closeFile(fileId);
+ // memBufferCache.close();
+ //
+ // System.out.printf("End of TEST3()\n");
+ //
+ // }
+
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeMergeTest.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeMergeTest.java
new file mode 100644
index 0000000..75d6ce1
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeMergeTest.java
@@ -0,0 +1,371 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class LSMTreeMergeTest extends AbstractLSMTreeTest {
+ private static final int PAGE_SIZE = 256;
+ private static final int NUM_PAGES = 30;
+ private static final int MAX_OPEN_FILES = 100;
+ private static final int HYRACKS_FRAME_SIZE = 128;
+ private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+ @Test
+ public void Test1() throws Exception {
+ System.out.printf("TEST1 START\n");
+ // in disk
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // in memory
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ // declare fields
+ int fieldCount = 2;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(cmps);
+
+ LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+ LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+ ITreeIndexFrameFactory insertLeafFrameFactory = new BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
+ ITreeIndexFrameFactory deleteLeafFrameFactory = new BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ IBTreeLeafFrame insertLeafFrame = (IBTreeLeafFrame) insertLeafFrameFactory.createFrame();
+
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+ IFreePageManager memFreePageManager = new InMemoryFreePageManager(NUM_PAGES, metaFrameFactory);
+
+ // For the Flush Mechanism
+ LSMEntireTupleWriterFactory flushTupleWriterFactory = new LSMEntireTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory flushLeafFrameFactory = new BTreeNSMLeafFrameFactory(flushTupleWriterFactory);
+ FreePageManagerFactory freePageManagerFactory = new FreePageManagerFactory(bufferCache, metaFrameFactory);
+ BTreeFactory bTreeFactory = new BTreeFactory(bufferCache, freePageManagerFactory, cmp, fieldCount,
+ interiorFrameFactory, flushLeafFrameFactory);
+
+ LSMTree lsmtree = new LSMTree(memBufferCache, bufferCache, fieldCount, cmp, memFreePageManager,
+ interiorFrameFactory, insertLeafFrameFactory, deleteLeafFrameFactory, bTreeFactory,
+ (IFileMapManager) fmp);
+
+ // LSMTree lsmtree = LSMTreeUtils.createLSMTree(10, 30, 2,
+ // memBufferCache, bufferCache, fileId, typeTraits,
+ // cmp.getComparators(), BTreeLeafFrameType.REGULAR_NSM,
+ // (IFileMapManager)fmp);
+
+ lsmtree.create(fileId);
+ lsmtree.open(fileId);
+
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ int resultSize = 100000;
+ int[][] resultArray = new int[resultSize][2];
+
+ // insert 0~250 tuples
+ System.out.printf("Start for 1st Insert\n");
+ ITreeIndexAccessor lsmTreeAccessor = lsmtree.createAccessor();
+ for (int i = 0; i < 251; i++) {
+ resultArray[i][0] = i;
+ resultArray[i][1] = i;
+ }
+ for (int i = 0; i < 251; i++) {
+
+ int f0 = resultArray[i][0];
+ int f1 = resultArray[i][1];
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.insert(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test03:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ System.out.printf("Start for 2nd Insert\n");
+ // delete 126~250.
+ for (int i = 126; i < 251; i++) {
+ resultArray[i][0] = i;
+ resultArray[i][1] = i;
+ }
+ for (int i = 126; i < 251; i++) {
+ int f0 = resultArray[i][0];
+ int f1 = resultArray[i][1];
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.delete(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test03:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ // insert 251~1
+ for (int i = 251; i > 0; i--) {
+ resultArray[i][0] = i;
+ resultArray[i][1] = i;
+ }
+ for (int i = 251; i > 0; i--) {
+
+ int f0 = resultArray[i][0];
+ int f1 = resultArray[i][1];
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.insert(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test03:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ // delete 100~0
+ for (int i = 100; i >= 0; i--) {
+ resultArray[i][0] = i;
+ resultArray[i][1] = i;
+ }
+ for (int i = 100; i >= 0; i--) {
+
+ int f0 = resultArray[i][0];
+ int f1 = resultArray[i][1];
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.delete(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test03:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ // insert 1~50
+ for (int i = 1; i < 51; i++) {
+ resultArray[i][0] = i;
+ resultArray[i][1] = i;
+ }
+ for (int i = 1; i < 51; i++) {
+
+ int f0 = resultArray[i][0];
+ int f1 = resultArray[i][1];
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.insert(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test03:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ lsmtree.merge();
+
+ // Output should be:
+ // In memory tree = 0->del, 1~50 ins
+ // MergedTree = 0->ins, 1~100->del, 101~251->ins
+ // Whole search = 1~50,101~251
+
+ // System.out.println("LSMTreeFlushTest: start print the first tree");
+ // lsmtree.scanDiskTree(1);
+ //
+ // Print out the first in Disk Btree
+ // System.out.println("LSMTreeFlushTest: start print the first tree");
+ // lsmtree.scanDiskTree(0);
+ // Print out the In-memory Tree
+ System.out.println("LSMTreeFlushTest: start print the In-memory tree");
+ lsmtree.scanInMemoryTree();
+ // TODO: scan whole tree
+ /*
+ * System.out.println("Range SEARCH:");
+ *
+ * BTreeOpContext searchOpCtx = lsmtree.createOpContext(IndexOp.SEARCH);
+ * ITreeIndexCursor rangeCursor = new LSMTreeRangeSearchCursor();
+ *
+ * // build low and high keys ArrayTupleBuilder ktb = new
+ * ArrayTupleBuilder(cmp.getKeyFieldCount()); DataOutput kdos =
+ * ktb.getDataOutput();
+ *
+ * ISerializerDeserializer[] keyDescSers = {
+ * IntegerSerializerDeserializer.INSTANCE }; RecordDescriptor keyDesc =
+ * new RecordDescriptor(keyDescSers); IFrameTupleAccessor keyAccessor =
+ * new FrameTupleAccessor( ctx.getFrameSize(), keyDesc);
+ * keyAccessor.reset(frame);
+ *
+ * appender.reset(frame, true);
+ *
+ * // build and append low key ktb.reset();
+ * IntegerSerializerDeserializer.INSTANCE.serialize(-1, kdos);
+ * ktb.addFieldEndOffset(); appender.append(ktb.getFieldEndOffsets(),
+ * ktb.getByteArray(), 0, ktb.getSize());
+ *
+ * // build and append high key ktb.reset();
+ * IntegerSerializerDeserializer.INSTANCE.serialize(300, kdos);
+ * ktb.addFieldEndOffset(); appender.append(ktb.getFieldEndOffsets(),
+ * ktb.getByteArray(), 0, ktb.getSize());
+ *
+ * // create tuplereferences for search keys FrameTupleReference lowKey
+ * = new FrameTupleReference(); lowKey.reset(keyAccessor, 0);
+ *
+ * FrameTupleReference highKey = new FrameTupleReference();
+ * highKey.reset(keyAccessor, 1);
+ *
+ * IBinaryComparator[] searchCmps = new IBinaryComparator[1];
+ * searchCmps[0] = IntegerBinaryComparatorFactory.INSTANCE
+ * .createBinaryComparator(); MultiComparator searchCmp = new
+ * MultiComparator(searchCmps);
+ *
+ * RangePredicate rangePred = new RangePredicate(true, lowKey, highKey,
+ * true, true, searchCmp, searchCmp); lsmtree.search(rangeCursor,
+ * rangePred, searchOpCtx);
+ *
+ * try { while (rangeCursor.hasNext()) { rangeCursor.next();
+ * ITupleReference frameTuple = rangeCursor.getTuple(); String rec =
+ * TupleUtils.printTuple(frameTuple, recDescSers); if
+ * (((LSMTypeAwareTupleReference) frameTuple).isDelete()) {
+ * System.out.println("del " + rec); } else { System.out.println("ins "
+ * + rec); } // System.out.println("------------------"); } } catch
+ * (Exception e) { e.printStackTrace(); } finally { rangeCursor.close();
+ * }
+ */
+ lsmtree.close();
+ bufferCache.closeFile(fileId);
+ memBufferCache.close();
+
+ System.out.printf("End of TEST1()\n");
+
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeOpContext.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeOpContext.java
new file mode 100644
index 0000000..d038ada
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeOpContext.java
@@ -0,0 +1,66 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOpContext;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.IndexOp;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+
+/**
+ * Operation context for an LSM-tree. Extends BTreeOpContext so the in-memory
+ * BTree can be operated on directly, swapping between the insert leaf frame
+ * and the delete (antimatter) leaf frame depending on the current operation.
+ */
+public final class LSMTreeOpContext extends BTreeOpContext {
+
+    public ITreeIndexFrameFactory insertLeafFrameFactory;
+    public ITreeIndexFrameFactory deleteLeafFrameFactory;
+    public IBTreeLeafFrame insertLeafFrame;
+    public IBTreeLeafFrame deleteLeafFrame;
+    public final BTree.BTreeAccessor memBtreeAccessor;
+
+    /**
+     * @param memBtreeAccessor accessor of the in-memory BTree; its op context
+     *            is replaced by this LSMTreeOpContext so all ops go through
+     *            the LSM frames.
+     * @param insertLeafFrameFactory leaf frames for regular (insert) tuples
+     * @param deleteLeafFrameFactory leaf frames for antimatter (delete) tuples
+     */
+    public LSMTreeOpContext(BTree.BTreeAccessor memBtreeAccessor, ITreeIndexFrameFactory insertLeafFrameFactory,
+            ITreeIndexFrameFactory deleteLeafFrameFactory, ITreeIndexFrameFactory interiorFrameFactory,
+            ITreeIndexMetaDataFrame metaFrame, MultiComparator cmp) {
+        super(insertLeafFrameFactory, interiorFrameFactory, metaFrame, cmp);
+
+        this.memBtreeAccessor = memBtreeAccessor;
+        // Overwrite the BTree accessor's op context with our LSMTreeOpContext.
+        this.memBtreeAccessor.setOpContext(this);
+
+        this.insertLeafFrameFactory = insertLeafFrameFactory;
+        this.deleteLeafFrameFactory = deleteLeafFrameFactory;
+        this.insertLeafFrame = (IBTreeLeafFrame) insertLeafFrameFactory.createFrame();
+        this.deleteLeafFrame = (IBTreeLeafFrame) deleteLeafFrameFactory.createFrame();
+
+        if (insertLeafFrame != null) {
+            insertLeafFrame.setMultiComparator(cmp);
+        }
+        if (deleteLeafFrame != null) {
+            deleteLeafFrame.setMultiComparator(cmp);
+        }
+
+        // Initialize leaf frames/mode for the op inherited from the superclass.
+        reset(op);
+    }
+
+    /**
+     * Resets the context for the given operation. An LSM delete is physically
+     * an insert of an antimatter tuple into the in-memory BTree, so for
+     * DELETE the superclass is reset for INSERT and the delete leaf frame is
+     * installed. (Previously the superclass was reset twice for DELETE --
+     * once with DELETE, then again with INSERT; a single reset with the
+     * effective op suffices.)
+     */
+    @Override
+    public void reset(IndexOp newOp) {
+        if (newOp == IndexOp.DELETE) {
+            super.reset(IndexOp.INSERT);
+            setDeleteMode();
+        } else {
+            super.reset(newOp);
+            if (newOp == IndexOp.INSERT) {
+                setInsertMode();
+            }
+        }
+    }
+
+    // Use the regular (insert) leaf frame for subsequent BTree operations.
+    public void setInsertMode() {
+        this.leafFrame = insertLeafFrame;
+        leafFrameFactory = insertLeafFrameFactory;
+    }
+
+    // Use the antimatter (delete) leaf frame for subsequent BTree operations.
+    public void setDeleteMode() {
+        this.leafFrame = deleteLeafFrame;
+        leafFrameFactory = deleteLeafFrameFactory;
+    }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeRangeSearchCursor.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeRangeSearchCursor.java
new file mode 100644
index 0000000..eaa99ac
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeRangeSearchCursor.java
@@ -0,0 +1,182 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import java.util.PriorityQueue;
+
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeRangeSearchCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ICursorInitialState;
+import edu.uci.ics.hyracks.storage.am.common.api.ISearchPredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICachedPage;
+
+/**
+ * Range-search cursor over all components of an LSM-tree: it merges the
+ * per-component BTree cursors through a priority queue, collapses duplicate
+ * keys seen in more than one component, and suppresses tuples whose winning
+ * version is a delete (antimatter) tuple. The queue ordering is defined by
+ * the LSMPriorityQueueComparator supplied to initPriorityQueue.
+ */
+public class LSMTreeRangeSearchCursor implements ITreeIndexCursor {
+
+    // One underlying BTree range cursor per LSM component.
+    private BTreeRangeSearchCursor[] rangeCursors;
+    // Head of this queue is the next candidate tuple across all components.
+    private PriorityQueue<LSMPriorityQueueElement> outputPriorityQueue;
+    private MultiComparator cmp;
+    // Element most recently handed out by next(), or popped as a delete marker.
+    private LSMPriorityQueueElement outputElement;
+    // Element recycled when re-offering the next tuple from a component.
+    private LSMPriorityQueueElement reusedElement;
+    private int numberOfTrees;
+    // True while outputElement's component still has to be advanced and
+    // re-offered before the queue is consistent again.
+    private boolean needPush;
+    private LSMTree lsm;
+
+    public LSMTreeRangeSearchCursor() {
+        outputElement = null;
+        needPush = false;
+    }
+
+    /**
+     * Primes the priority queue with the first tuple from each component's
+     * cursor, then normalizes it via checkPriorityQueue().
+     */
+    public void initPriorityQueue(int numberOfTrees, LSMPriorityQueueComparator LSMPriorityQueueCmp) throws Exception {
+        this.numberOfTrees = numberOfTrees;
+        outputPriorityQueue = new PriorityQueue<LSMPriorityQueueElement>(numberOfTrees, LSMPriorityQueueCmp);
+        for (int i = 0; i < numberOfTrees; i++) {
+            LSMPriorityQueueElement element;
+            if (rangeCursors[i].hasNext()) {
+                rangeCursors[i].next();
+                element = new LSMPriorityQueueElement(rangeCursors[i].getTuple(), i);
+                outputPriorityQueue.offer(element);
+            }
+        }
+        checkPriorityQueue();
+    }
+
+    // Exposes the per-component cursor so callers can open/position it.
+    public BTreeRangeSearchCursor getCursor(int cursorIndex) {
+        return rangeCursors[cursorIndex];
+    }
+
+    @Override
+    public void reset() {
+        // do nothing
+    }
+
+    @Override
+    public boolean hasNext() throws Exception {
+        // Normalize the queue first; a next tuple exists iff the queue is
+        // non-empty afterwards.
+        checkPriorityQueue();
+        return !outputPriorityQueue.isEmpty();
+    }
+
+    @Override
+    public void next() throws Exception {
+        // Pop the winning tuple. Its component cursor is advanced lazily on
+        // the next checkPriorityQueue() call (hence needPush), because
+        // advancing now could invalidate the tuple we are handing out.
+        outputElement = outputPriorityQueue.poll();
+        needPush = true;
+        if (outputElement == null) {
+            throw new Exception("The outputPriorityQueue is empty");
+        }
+    }
+
+    @Override
+    public void open(ICursorInitialState initialState, ISearchPredicate searchPred) throws HyracksDataException {
+        // NOTE(review): searchPred is unused here; the per-component cursors
+        // obtained via getCursor() appear to be opened by the caller -- confirm.
+        lsm = ((LSMTreeCursorInitialState) initialState).getLsm();
+        cmp = ((LSMTreeCursorInitialState) initialState).getCmp();
+
+        rangeCursors = new BTreeRangeSearchCursor[((LSMTreeCursorInitialState) initialState).getNumberOfTrees()];
+
+        for (int i = 0; i < ((LSMTreeCursorInitialState) initialState).getNumberOfTrees(); i++) {
+            rangeCursors[i] = new BTreeRangeSearchCursor((IBTreeLeafFrame) ((LSMTreeCursorInitialState) initialState)
+                    .getLeafFrameFactory().createFrame(), false);
+        }
+    }
+
+    @Override
+    public ICachedPage getPage() {
+        // do nothing
+        return null;
+    }
+
+    @Override
+    public void close() throws Exception {
+        // Release this searcher's reference on the LSM-tree and tear down all
+        // component cursors.
+        lsm.decreaseThreadReferenceCounter();
+        outputPriorityQueue.clear();
+        for (int i = 0; i < numberOfTrees; i++) {
+            rangeCursors[i].close();
+        }
+        rangeCursors = null;
+    }
+
+    @Override
+    public void setBufferCache(IBufferCache bufferCache) {
+        // do nothing
+    }
+
+    @Override
+    public void setFileId(int fileId) {
+        // do nothing
+    }
+
+    @Override
+    public ITupleReference getTuple() {
+        return (ITupleReference) outputElement.getTuple();
+    }
+
+    // Advances the given component's cursor and, if it produced another tuple,
+    // re-offers the recycled element holding that tuple.
+    private void pushIntoPriorityQueue(int cursorIndex) throws Exception {
+
+        if (rangeCursors[cursorIndex].hasNext()) {
+            rangeCursors[cursorIndex].next();
+            reusedElement.reset(rangeCursors[cursorIndex].getTuple(), cursorIndex);
+            outputPriorityQueue.offer(reusedElement);
+        }
+    }
+
+    // Normalizes the queue head: pops delete (antimatter) tuples when no
+    // previously returned tuple is pending, and collapses heads whose key
+    // equals the previously returned tuple so each key is exposed only once.
+    private void checkPriorityQueue() throws Exception {
+
+        while (!outputPriorityQueue.isEmpty() || needPush == true) {
+            if (!outputPriorityQueue.isEmpty()) {
+                LSMPriorityQueueElement checkElement = outputPriorityQueue.peek();
+                // If there is no previous tuple or the previous tuple can be
+                // ignored
+                if (outputElement == null) {
+                    // Test the tuple is a delete tuple or not
+                    if (((LSMTypeAwareTupleReference) checkElement.getTuple()).isDelete() == true) {
+                        // If the tuple is a delete tuple then pop it and mark
+                        // it "needPush"
+                        // Cannot push at this time because the tuple may be
+                        // modified if "hasNext" is called
+                        outputElement = outputPriorityQueue.poll();
+                        needPush = true;
+                    } else {
+                        break;
+                    }
+                } else {
+                    // Compare the previous tuple and the head tuple in the PQ
+                    if (cmp.compare(outputElement.getTuple(), checkElement.getTuple()) == 0) {
+                        // If the previous tuple and the head tuple are
+                        // identical
+                        // then pop the head tuple and push the next tuple from
+                        // the tree of head tuple
+
+                        // the head element of PQ is useless now
+                        reusedElement = outputPriorityQueue.poll();
+                        // int treeNum = reusedElement.getTreeNum();
+                        pushIntoPriorityQueue(reusedElement.getCursorIndex());
+                    } else {
+                        // If the previous tuple and the head tuple are
+                        // different
+                        // the info of previous tuple is useless
+                        if (needPush == true) {
+                            reusedElement = outputElement;
+                            pushIntoPriorityQueue(outputElement.getCursorIndex());
+                            needPush = false;
+                        }
+                        outputElement = null;
+                    }
+                }
+            } else {
+                // the priority queue is empty and needPush
+                reusedElement = outputElement;
+                pushIntoPriorityQueue(outputElement.getCursorIndex());
+                needPush = false;
+                outputElement = null;
+            }
+        }
+    }
+
+    @Override
+    public boolean exclusiveLatchNodes() {
+        return false;
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeSearchTest.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeSearchTest.java
new file mode 100644
index 0000000..d26ba3f
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTreeSearchTest.java
@@ -0,0 +1,409 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.nio.ByteBuffer;
+import java.util.Date;
+import java.util.Random;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.comm.IFrameTupleAccessor;
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.data.std.accessors.PointableBinaryComparatorFactory;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.FrameTupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.TupleUtils;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrame;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrame;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.btree.impls.RangePredicate;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexCursor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrame;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.freepage.LinkedListFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+// TODO: needs a big cleanup phase.
+public class LSMTreeSearchTest extends AbstractLSMTreeTest {
+
+ private static final int PAGE_SIZE = 256;
+ private static final int NUM_PAGES = 10;
+ private static final int MAX_OPEN_FILES = 100;
+ private static final int HYRACKS_FRAME_SIZE = 128;
+ private IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+ @Test
+ public void Test1() throws Exception {
+ // in disk
+ TestStorageManagerComponentHolder.init(PAGE_SIZE, NUM_PAGES, MAX_OPEN_FILES);
+ IBufferCache bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ int fileId = fmp.lookupFileId(file);
+ bufferCache.openFile(fileId);
+
+ // in memory
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(PAGE_SIZE, NUM_PAGES);
+ IBufferCache memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ // declare fields
+ int fieldCount = 2;
+ ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+ typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+ typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+ // declare keys
+ int keyFieldCount = 1;
+ IBinaryComparator[] cmps = new IBinaryComparator[keyFieldCount];
+ cmps[0] = PointableBinaryComparatorFactory.of(IntegerPointable.FACTORY).createBinaryComparator();
+
+ MultiComparator cmp = new MultiComparator(cmps);
+
+ LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+ LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+ ITreeIndexFrameFactory insertLeafFrameFactory = new BTreeNSMLeafFrameFactory(insertTupleWriterFactory);
+ ITreeIndexFrameFactory deleteLeafFrameFactory = new BTreeNSMLeafFrameFactory(deleteTupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+
+ IBTreeLeafFrame insertLeafFrame = (IBTreeLeafFrame) insertLeafFrameFactory.createFrame();
+
+ IFreePageManager freePageManager = new LinkedListFreePageManager(bufferCache, fileId, 0, metaFrameFactory);
+ IFreePageManager memFreePageManager = new InMemoryFreePageManager(100, metaFrameFactory);
+
+ LSMEntireTupleWriterFactory flushTupleWriterFactory = new LSMEntireTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory flushLeafFrameFactory = new BTreeNSMLeafFrameFactory(flushTupleWriterFactory);
+ FreePageManagerFactory freePageManagerFactory = new FreePageManagerFactory(bufferCache, metaFrameFactory);
+ BTreeFactory bTreeFactory = new BTreeFactory(bufferCache, freePageManagerFactory, cmp, fieldCount,
+ interiorFrameFactory, flushLeafFrameFactory);
+
+ LSMTree lsmtree = new LSMTree(memBufferCache, bufferCache, fieldCount, cmp, memFreePageManager,
+ interiorFrameFactory, insertLeafFrameFactory, deleteLeafFrameFactory, bTreeFactory,
+ (IFileMapManager) fmp);
+
+ lsmtree.create(fileId);
+ lsmtree.open(fileId);
+
+ ByteBuffer frame = ctx.allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx.getFrameSize());
+
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(fieldCount);
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] recDescSers = { IntegerSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor recDesc = new RecordDescriptor(recDescSers);
+
+ IFrameTupleAccessor accessor = new FrameTupleAccessor(ctx.getFrameSize(), recDesc);
+ accessor.reset(frame);
+
+ FrameTupleReference tuple = new FrameTupleReference();
+
+ ITreeIndexAccessor lsmTreeAccessor = lsmtree.createAccessor();
+
+ // delete
+ for (int i = 26; i < 36; i++) {
+
+ int f0 = i;
+ int f1 = -1;
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.delete(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test01:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ for (int i = 21; i < 31; i++) {
+ int f0 = i;
+ int f1 = 0;
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ try {
+ lsmTreeAccessor.insert(t);
+ } catch (TreeIndexException e) {
+ System.out.println("test01:" + e);
+ e.printStackTrace();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ // In disk insertions 1
+
+ LOGGER.info("Start in-disk insertions");
+
+ fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+ FileReference file_1 = new FileReference(new File(fileName));
+ bufferCache.createFile(file_1);
+ int fileId_1 = fmp.lookupFileId(file_1);
+ bufferCache.openFile(fileId_1);
+
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+
+ IBTreeLeafFrame leafFrame = (IBTreeLeafFrame) leafFrameFactory.createFrame();
+ IBTreeInteriorFrame interiorFrame = (IBTreeInteriorFrame) interiorFrameFactory.createFrame();
+ ITreeIndexMetaDataFrame metaFrame = metaFrameFactory.createFrame();
+
+ IFreePageManager freePageManager_1 = new LinkedListFreePageManager(bufferCache, fileId_1, 0, metaFrameFactory);
+
+ BTree btree_1 = new BTree(bufferCache, fieldCount, cmp, freePageManager_1, interiorFrameFactory,
+ leafFrameFactory);
+ btree_1.create(fileId_1);
+ btree_1.open(fileId_1);
+
+ // TODO: rename this one.
+ InDiskTreeInfo info_1 = new InDiskTreeInfo(btree_1);
+ lsmtree.inDiskTreeInfoList.add(info_1);
+
+ Random rnd = new Random();
+ rnd.setSeed(50);
+
+ LOGGER.info("INSERTING INTO TREE");
+
+ // ByteBuffer
+ frame = ctx.allocateFrame();
+ // FrameTupleAppender
+ appender = new FrameTupleAppender(ctx.getFrameSize());
+ // ArrayTupleBuilder
+ tb = new ArrayTupleBuilder(fieldCount);
+ // DataOutput
+ dos = tb.getDataOutput();
+
+ recDesc = new RecordDescriptor(recDescSers);
+
+ accessor.reset(frame);
+
+ tuple = new FrameTupleReference();
+
+ ITreeIndexAccessor indexAccessor_1 = btree_1.createAccessor();
+
+ // 10000
+ for (int i = 16; i < 41; i++) {
+
+ int f0 = i;
+ int f1 = 1;
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ if (i % 10 == 0) {
+ System.out.println("INSERTING " + i + " : " + f0 + " " + f1);
+ }
+
+ try {
+ indexAccessor_1.insert(t);
+ } catch (TreeIndexException e) {
+ e.printStackTrace();
+ System.out.println("Error: " + e);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ // btree_1.close();
+
+ // In disk insertions 2
+
+ LOGGER.info("Start in-disk insertions");
+
+ fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+ FileReference file_2 = new FileReference(new File(fileName));
+ bufferCache.createFile(file_2);
+ int fileId_2 = fmp.lookupFileId(file_2);
+ bufferCache.openFile(fileId_2);
+
+ IFreePageManager freePageManager_2 = new LinkedListFreePageManager(bufferCache, fileId_2, 0, metaFrameFactory);
+ BTree btree_2 = new BTree(bufferCache, fieldCount, cmp, freePageManager_2, interiorFrameFactory,
+ leafFrameFactory);
+ btree_2.create(fileId_2);
+ btree_2.open(fileId_2);
+
+ InDiskTreeInfo info_2 = new InDiskTreeInfo(btree_2);
+ lsmtree.inDiskTreeInfoList.add(info_2);
+
+ LOGGER.info("INSERTING INTO TREE");
+
+ // ByteBuffer
+ frame = ctx.allocateFrame();
+ // FrameTupleAppender
+ appender = new FrameTupleAppender(ctx.getFrameSize());
+ // ArrayTupleBuilder
+ tb = new ArrayTupleBuilder(fieldCount);
+ // DataOutput
+ dos = tb.getDataOutput();
+
+ recDesc = new RecordDescriptor(recDescSers);
+
+ accessor.reset(frame);
+
+ tuple = new FrameTupleReference();
+
+ ITreeIndexAccessor indexAccessor_2 = btree_2.createAccessor();
+
+ // 10000
+ for (int i = 11; i < 51; i++) {
+
+ int f0 = i;
+ int f1 = 2;
+
+ tb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f0, dos);
+ tb.addFieldEndOffset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(f1, dos);
+ tb.addFieldEndOffset();
+
+ appender.reset(frame, true);
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ tuple.reset(accessor, 0);
+
+ ArrayTupleReference t = new ArrayTupleReference();
+ t.reset(tb.getFieldEndOffsets(), tb.getByteArray());
+
+ if (i % 10 == 0) {
+ System.out.println("INSERTING " + i + " : " + f0 + " " + f1);
+ }
+
+ try {
+ indexAccessor_2.insert(t);
+ } catch (TreeIndexException e) {
+ e.printStackTrace();
+ System.out.println("Error: " + e);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ // btree_2.close();
+
+ // range search in [-1000, 1000]
+ LOGGER.info("RANGE SEARCH:");
+
+ ITreeIndexCursor rangeCursor = new LSMTreeRangeSearchCursor();
+
+ // build low and high keys
+ ArrayTupleBuilder ktb = new ArrayTupleBuilder(cmp.getKeyFieldCount());
+ DataOutput kdos = ktb.getDataOutput();
+
+ ISerializerDeserializer[] keyDescSers = { IntegerSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyDesc = new RecordDescriptor(keyDescSers);
+ IFrameTupleAccessor keyAccessor = new FrameTupleAccessor(ctx.getFrameSize(), keyDesc);
+ keyAccessor.reset(frame);
+
+ appender.reset(frame, true);
+
+ // build and append low key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(-100, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // build and append high key
+ ktb.reset();
+ IntegerSerializerDeserializer.INSTANCE.serialize(100, kdos);
+ ktb.addFieldEndOffset();
+ appender.append(ktb.getFieldEndOffsets(), ktb.getByteArray(), 0, ktb.getSize());
+
+ // create tuplereferences for search keys
+ FrameTupleReference lowKey = new FrameTupleReference();
+ lowKey.reset(keyAccessor, 0);
+
+ FrameTupleReference highKey = new FrameTupleReference();
+ highKey.reset(keyAccessor, 1);
+
+ IBinaryComparator[] searchCmps = cmps;
+ MultiComparator searchCmp = new MultiComparator(searchCmps);
+
+ RangePredicate rangePred = new RangePredicate(true, lowKey, highKey, true, true, searchCmp, searchCmp);
+ lsmTreeAccessor.search(rangeCursor, rangePred);
+
+ try {
+ while (rangeCursor.hasNext()) {
+ rangeCursor.next();
+ ITupleReference frameTuple = rangeCursor.getTuple();
+ String rec = TupleUtils.printTuple(frameTuple, recDescSers);
+ if (((LSMTypeAwareTupleReference) frameTuple).isDelete()) {
+ System.out.println("del " + rec);
+ } else {
+ System.out.println("ins " + rec);
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ } finally {
+ rangeCursor.close();
+ }
+
+ lsmtree.close();
+ bufferCache.closeFile(fileId);
+ memBufferCache.close();
+ }
+}
\ No newline at end of file
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleReference.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleReference.java
new file mode 100644
index 0000000..90b564f
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleReference.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
+
+/**
+ * Tuple reference for LSM-tree tuples. Extends the type-aware tuple layout
+ * with one extra bit in the null-flags region that marks the tuple as an
+ * insert (0) or delete (1) record.
+ */
+public class LSMTypeAwareTupleReference extends TypeAwareTupleReference implements ILSMTreeTupleReference {
+
+    public LSMTypeAwareTupleReference(ITypeTraits[] typeTraits) {
+        super(typeTraits);
+    }
+
+    @Override
+    protected int getNullFlagsBytes() {
+        // One extra bit beyond the per-field null flags stores the
+        // insert/delete marker, hence (fieldCount + 1) bits rounded up to bytes.
+        return (int) Math.ceil((fieldCount + 1.0) / 8.0);
+    }
+
+    @Override
+    public boolean isDelete() {
+        // The delete marker is the most significant bit of the first
+        // null-flags byte at the tuple's start offset.
+        return (buf.array()[tupleStartOff] & 0x80) != 0;
+    }
+
+    /** Returns the absolute offset of this tuple within its backing buffer. */
+    public int getTupleStart() {
+        return tupleStartOff;
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleReferenceTest.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleReferenceTest.java
new file mode 100644
index 0000000..815187e
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleReferenceTest.java
@@ -0,0 +1,71 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import static org.junit.Assert.fail;
+
+import java.nio.ByteBuffer;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+
+public class LSMTypeAwareTupleReferenceTest {
+
+    /** An insert tuple writer must leave the tuple's delete bit clear. */
+    @Test
+    public void test01() throws Exception {
+
+        int targetOff = 0;
+        ByteBuffer buf = ByteBuffer.allocate(32);
+
+        // Two fixed-length integer fields.
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+        ITreeIndexTupleWriter insertTupleWriter = insertTupleWriterFactory.createTupleWriter();
+        ITreeIndexTupleReference lsmTupleReference = insertTupleWriter.createTupleReference();
+
+        lsmTupleReference.resetByTupleOffset(buf, targetOff);
+        insertTupleWriter.writeTuple(lsmTupleReference, buf, targetOff);
+
+        if (((LSMTypeAwareTupleReference) lsmTupleReference).isDelete()) {
+            fail("insert tuple writer must not set the delete bit");
+        }
+    }
+
+    /** A delete tuple writer must set the tuple's delete bit. */
+    @Test
+    public void test02() throws Exception {
+
+        int targetOff = 0;
+        ByteBuffer buf = ByteBuffer.allocate(32);
+
+        // Two fixed-length integer fields.
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+        ITreeIndexTupleWriter deleteTupleWriter = deleteTupleWriterFactory.createTupleWriter();
+        ITreeIndexTupleReference lsmTupleReference = deleteTupleWriter.createTupleReference();
+
+        lsmTupleReference.resetByTupleOffset(buf, targetOff);
+        deleteTupleWriter.writeTuple(lsmTupleReference, buf, targetOff);
+
+        if (!((LSMTypeAwareTupleReference) lsmTupleReference).isDelete()) {
+            fail("delete tuple writer must set the delete bit");
+        }
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriter.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriter.java
new file mode 100644
index 0000000..25411a0
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriter.java
@@ -0,0 +1,47 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
+
+/**
+ * Tuple writer that lays out tuples in the type-aware format while reserving
+ * one extra null-flag bit as an insert/delete marker. When constructed with
+ * isDelete == true, every tuple it writes has that marker bit set.
+ */
+public class LSMTypeAwareTupleWriter extends TypeAwareTupleWriter {
+    private final boolean isDelete;
+
+    public LSMTypeAwareTupleWriter(ITypeTraits[] typeTraits, boolean isDelete) {
+        super(typeTraits);
+        this.isDelete = isDelete;
+    }
+
+    @Override
+    public ITreeIndexTupleReference createTupleReference() {
+        return new LSMTypeAwareTupleReference(typeTraits);
+    }
+
+    @Override
+    protected int getNullFlagsBytes(int numFields) {
+        // Reserve one bit beyond the per-field null flags for the
+        // insert/delete marker: (numFields + 1) bits rounded up to bytes.
+        return (int) Math.ceil((numFields + 1.0) / 8.0);
+    }
+
+    @Override
+    protected int getNullFlagsBytes(ITupleReference tuple) {
+        // Same accounting as above, sized from the tuple's own field count.
+        return (int) Math.ceil((tuple.getFieldCount() + 1.0) / 8.0);
+    }
+
+    @Override
+    public int writeTuple(ITupleReference tuple, byte[] targetBuf, int targetOff) {
+        final int bytesWritten = super.writeTuple(tuple, targetBuf, targetOff);
+        if (isDelete) {
+            // Turn on the most significant bit of the first null-flags byte.
+            targetBuf[targetOff] |= (byte) (1 << 7);
+        }
+        return bytesWritten;
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriterFactory.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriterFactory.java
new file mode 100644
index 0000000..c2eb30e
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriterFactory.java
@@ -0,0 +1,24 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+
+/**
+ * Factory for {@link LSMTypeAwareTupleWriter}s. The isDelete flag determines
+ * whether produced writers mark written tuples with the delete bit.
+ */
+public class LSMTypeAwareTupleWriterFactory extends TypeAwareTupleWriterFactory {
+
+    private static final long serialVersionUID = 1L;
+    // Both fields are immutable after construction; declared final accordingly.
+    private final ITypeTraits[] typeTraits;
+    private final boolean isDelete;
+
+    public LSMTypeAwareTupleWriterFactory(ITypeTraits[] typeTraits, boolean isDelete) {
+        super(typeTraits);
+        this.typeTraits = typeTraits;
+        this.isDelete = isDelete;
+    }
+
+    @Override
+    public ITreeIndexTupleWriter createTupleWriter() {
+        return new LSMTypeAwareTupleWriter(typeTraits, isDelete);
+    }
+
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriterFactoryTest.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriterFactoryTest.java
new file mode 100644
index 0000000..43879c0
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriterFactoryTest.java
@@ -0,0 +1,31 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import static org.junit.Assert.fail;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+
+public class LSMTypeAwareTupleWriterFactoryTest {
+    /** The factory must produce a non-null tuple writer. */
+    @Test
+    public void test01() throws Exception {
+
+        // declare fields
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        LSMTypeAwareTupleWriterFactory lsmTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+        ITreeIndexTupleWriter lsmTupleWriter = lsmTupleWriterFactory.createTupleWriter();
+
+        if (lsmTupleWriter == null) {
+            fail("fail to create LSMTypeAwareTupleWriter!");
+        }
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriterTest.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriterTest.java
new file mode 100644
index 0000000..91bb6e1
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/impls/LSMTypeAwareTupleWriterTest.java
@@ -0,0 +1,101 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.impls;
+
+import static org.junit.Assert.fail;
+
+import java.nio.ByteBuffer;
+
+import org.junit.Test;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.data.std.primitive.IntegerPointable;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexTupleWriter;
+
+/**
+ * Tests for {@link LSMTypeAwareTupleWriter}: tuple-reference creation and the
+ * byte sizes / delete bit produced by insert and delete writers.
+ */
+public class LSMTypeAwareTupleWriterTest {
+
+    // test create tuple reference
+    @Test
+    public void test01() throws Exception {
+
+        // declare fields
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        LSMTypeAwareTupleWriterFactory lsmTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(
+                typeTraits, false);
+        ITreeIndexTupleWriter lsmTupleWriter = lsmTupleWriterFactory.createTupleWriter();
+
+        ITreeIndexTupleReference lsmTupleReference = lsmTupleWriter.createTupleReference();
+
+        if (lsmTupleReference != null) {
+            return;
+        } else {
+            fail("fail to create LSMTypeAwareTupleWriter");
+        }
+    }
+
+    // test insert tuple writer
+    @Test
+    public void test02() throws Exception {
+
+        int targetOff = 0;
+        ByteBuffer buf = ByteBuffer.allocate(32);
+
+        // declare fields
+        int fieldCount = 2;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+
+        LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(
+                typeTraits, false);
+        ITreeIndexTupleWriter insertTupleWriter = insertTupleWriterFactory.createTupleWriter();
+        ITreeIndexTupleReference insertTupleReference = insertTupleWriter.createTupleReference();
+
+        insertTupleReference.resetByTupleOffset(buf, targetOff);
+
+        int num = insertTupleWriter.writeTuple(insertTupleReference, buf, targetOff);
+
+        boolean del = ((LSMTypeAwareTupleReference) insertTupleReference).isDelete();
+
+        // Expected size 9 = 1 null-flags byte (2 field bits + 1 delete bit)
+        // + 2 * 4 bytes of integer data -- assumes fixed-length fields need no
+        // offset slots in the type-aware layout (TODO confirm against
+        // TypeAwareTupleWriter). Insert writer must leave the delete bit unset.
+        if (num == 9 && del == false) {
+            return;
+        } else {
+            fail("fail to write tuple");
+        }
+    }
+
+    // test delete tuple writer
+    @Test
+    public void test03() throws Exception {
+
+        int targetOff = 0;
+        ByteBuffer buf = ByteBuffer.allocate(32);
+
+        // declare fields
+        int fieldCount = 3;
+        ITypeTraits[] typeTraits = new ITypeTraits[fieldCount];
+        typeTraits[0] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[1] = IntegerPointable.TYPE_TRAITS;
+        typeTraits[2] = IntegerPointable.TYPE_TRAITS;
+
+        LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(
+                typeTraits, true);
+        ITreeIndexTupleWriter deleteTupleWriter = deleteTupleWriterFactory.createTupleWriter();
+        ITreeIndexTupleReference deleteTupleReference = deleteTupleWriter.createTupleReference();
+
+        deleteTupleReference.resetByTupleOffset(buf, targetOff);
+
+        int num = deleteTupleWriter.writeTuple(deleteTupleReference, buf, targetOff);
+
+        boolean del = ((LSMTypeAwareTupleReference) deleteTupleReference).isDelete();
+
+        // Expected size 13 = 1 null-flags byte (3 field bits + 1 delete bit)
+        // + 3 * 4 bytes of integer data; the delete writer must set the bit.
+        if (num == 13 && del == true) {
+            return;
+        } else {
+            fail("fail to write tuple");
+        }
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/BTreeBulkLoadRunner.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/BTreeBulkLoadRunner.java
new file mode 100644
index 0000000..d3ae961
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/BTreeBulkLoadRunner.java
@@ -0,0 +1,37 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.perf;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.common.api.IIndexBulkLoadContext;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.TupleBatch;
+
+/**
+ * Experiment runner that loads a BTree through its bulk-load interface
+ * rather than via single-tuple insertions.
+ */
+public class BTreeBulkLoadRunner extends BTreeRunner {
+
+    // Target page fill factor for the bulk load (0 < fillFactor <= 1).
+    protected final float fillFactor;
+
+    public BTreeBulkLoadRunner(int numBatches, int pageSize, int numPages, ITypeTraits[] typeTraits, MultiComparator cmp, float fillFactor)
+            throws HyracksDataException, BTreeException {
+        super(numBatches, pageSize, numPages, typeTraits, cmp);
+        this.fillFactor = fillFactor;
+    }
+
+    /**
+     * Bulk-loads all generated tuple batches into a freshly created BTree and
+     * returns the elapsed wall-clock time in milliseconds.
+     */
+    @Override
+    public long runExperiment(DataGenThread dataGen, int numThreads) throws Exception {
+        btree.create(btreeFileId);
+        long start = System.currentTimeMillis();
+        // Bug fix: honor the configured fill factor instead of the hard-coded
+        // 1.0f that silently ignored the constructor argument.
+        IIndexBulkLoadContext bulkLoadCtx = btree.beginBulkLoad(fillFactor);
+        for (int i = 0; i < numBatches; i++) {
+            TupleBatch batch = dataGen.tupleBatchQueue.take();
+            for (int j = 0; j < batch.size(); j++) {
+                btree.bulkLoadAddTuple(batch.get(j), bulkLoadCtx);
+            }
+        }
+        btree.endBulkLoad(bulkLoadCtx);
+        long end = System.currentTimeMillis();
+        return end - start;
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/BTreePageSizePerf.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/BTreePageSizePerf.java
new file mode 100644
index 0000000..ff02aa1
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/BTreePageSizePerf.java
@@ -0,0 +1,72 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.perf;
+
+import java.util.Enumeration;
+import java.util.logging.Level;
+import java.util.logging.LogManager;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.DataGenThread;
+
+/**
+ * Benchmark driver: measures in-memory BTree insertion time across several
+ * page sizes, scaling the page count so the memory budget stays comparable
+ * (pageSize * numPages).
+ */
+public class BTreePageSizePerf {
+    public static void main(String[] args) throws Exception {
+        // Disable logging so we can better see the output times.
+        Enumeration<String> loggers = LogManager.getLogManager().getLoggerNames();
+        while(loggers.hasMoreElements()) {
+            String loggerName = loggers.nextElement();
+            Logger logger = LogManager.getLogManager().getLogger(loggerName);
+            logger.setLevel(Level.OFF);
+        }
+
+        int numTuples = 1000000;
+        int batchSize = 10000;
+        int numBatches = numTuples / batchSize;
+
+        ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
+        // One integer key field plus a 30-byte fixed-length payload field
+        // (the int argument to serdesToTypeTraits appends a payload trait).
+        ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, 30);
+
+        IBinaryComparator[] cmps = SerdeUtils.serdesToComparators(fieldSerdes, fieldSerdes.length);
+        MultiComparator cmp = new MultiComparator(cmps);
+
+        // Page size doubles while page count roughly halves each run.
+        // NOTE(review): the first two runs both use 100000 pages -- confirm
+        // whether 1024/100000 was intended or should be 200000.
+        runExperiment(numBatches, batchSize, 1024, 100000, fieldSerdes, cmp, typeTraits);
+        runExperiment(numBatches, batchSize, 2048, 100000, fieldSerdes, cmp, typeTraits);
+        runExperiment(numBatches, batchSize, 4096, 25000, fieldSerdes, cmp, typeTraits);
+        runExperiment(numBatches, batchSize, 8192, 12500, fieldSerdes, cmp, typeTraits);
+        runExperiment(numBatches, batchSize, 16384, 6250, fieldSerdes, cmp, typeTraits);
+        runExperiment(numBatches, batchSize, 32768, 3125, fieldSerdes, cmp, typeTraits);
+        runExperiment(numBatches, batchSize, 65536, 1564, fieldSerdes, cmp, typeTraits);
+        runExperiment(numBatches, batchSize, 131072, 782, fieldSerdes, cmp, typeTraits);
+        runExperiment(numBatches, batchSize, 262144, 391, fieldSerdes, cmp, typeTraits);
+    }
+
+    /**
+     * Runs one page-size configuration: repeats the insert experiment five
+     * times with a fresh data generator each time and prints the average.
+     */
+    private static void runExperiment(int numBatches, int batchSize, int pageSize, int numPages, ISerializerDeserializer[] fieldSerdes, MultiComparator cmp, ITypeTraits[] typeTraits) throws Exception {
+        System.out.println("PAGE SIZE: " + pageSize);
+        System.out.println("NUM PAGES: " + numPages);
+        System.out.println("MEMORY: " + (pageSize * numPages));
+        int repeats = 5;
+        long[] times = new long[repeats];
+        //BTreeRunner runner = new BTreeRunner(numTuples, pageSize, numPages, typeTraits, cmp);
+        InMemoryBTreeRunner runner = new InMemoryBTreeRunner(numBatches, pageSize, numPages, typeTraits, cmp);
+        runner.init();
+        int numThreads = 1;
+        for (int i = 0; i < repeats; i++) {
+            DataGenThread dataGen = new DataGenThread(numBatches, batchSize, 10, numThreads, fieldSerdes, 30, 50, false);
+            dataGen.start();
+            times[i] = runner.runExperiment(dataGen, numThreads);
+            System.out.println("TIME " + i + ": " + times[i] + "ms");
+        }
+        runner.deinit();
+        long avgTime = 0;
+        for (int i = 0; i < repeats; i++) {
+            avgTime += times[i];
+        }
+        avgTime /= repeats;
+        System.out.println("AVG TIME: " + avgTime + "ms");
+        System.out.println("-------------------------------");
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/BTreeRunner.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/BTreeRunner.java
new file mode 100644
index 0000000..46c21e5
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/BTreeRunner.java
@@ -0,0 +1,38 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.perf;
+
+import java.io.File;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+/**
+ * Experiment runner backed by an on-disk BTree (test buffer cache + backing
+ * file); only index setup differs from the in-memory superclass.
+ */
+public class BTreeRunner extends InMemoryBTreeRunner {
+    // NOTE(review): these constants may hide same-named fields in the
+    // superclass -- confirm that is intended.
+    protected static final int MAX_OPEN_FILES = 10;
+    protected static final int HYRACKS_FRAME_SIZE = 128;
+
+    public BTreeRunner(int numTuples, int pageSize, int numPages, ITypeTraits[] typeTraits, MultiComparator cmp) throws HyracksDataException, BTreeException {
+        super(numTuples, pageSize, numPages, typeTraits, cmp);
+    }
+
+    /** Creates the backing file in the test buffer cache and builds a disk BTree. */
+    @Override
+    protected void init(int pageSize, int numPages, ITypeTraits[] typeTraits, MultiComparator cmp) throws HyracksDataException, BTreeException {
+        IHyracksTaskContext ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+        TestStorageManagerComponentHolder.init(pageSize, numPages, MAX_OPEN_FILES);
+        bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+        IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+        FileReference file = new FileReference(new File(fileName));
+        bufferCache.createFile(file);
+        btreeFileId = fmp.lookupFileId(file);
+        bufferCache.openFile(btreeFileId);
+        btree = BTreeUtils
+                .createBTree(bufferCache, btreeFileId, typeTraits, cmp.getComparators(), BTreeLeafFrameType.REGULAR_NSM);
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/ConcurrentSkipListRunner.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/ConcurrentSkipListRunner.java
new file mode 100644
index 0000000..8f90902
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/ConcurrentSkipListRunner.java
@@ -0,0 +1,123 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.perf;
+
+import java.nio.ByteBuffer;
+import java.util.Comparator;
+import java.util.concurrent.ConcurrentSkipListSet;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.TupleBatch;
+
+/**
+ * Experiment runner that inserts generated tuples into a
+ * ConcurrentSkipListSet, serving as a baseline against the BTree runners.
+ */
+public class ConcurrentSkipListRunner implements IExperimentRunner {
+    /** Adapts a MultiComparator to java.util.Comparator for the skip list. */
+    public class TupleComparator implements Comparator<ITupleReference> {
+        private final MultiComparator cmp;
+
+        public TupleComparator(MultiComparator cmp) {
+            this.cmp = cmp;
+        }
+
+        @Override
+        public int compare(ITupleReference o1, ITupleReference o2) {
+            return cmp.compare(o1, o2);
+        }
+    }
+
+    private final TupleComparator tupleCmp;
+    private final int numBatches;
+    private final int batchSize;
+    private final int tupleSize; // serialized size of one tuple, in bytes
+    private final ITypeTraits[] typeTraits;
+
+    public ConcurrentSkipListRunner(int numBatches, int batchSize, int tupleSize, ITypeTraits[] typeTraits, MultiComparator cmp) {
+        this.numBatches = numBatches;
+        this.tupleSize = tupleSize;
+        this.batchSize = batchSize;
+        this.typeTraits = typeTraits;
+        tupleCmp = new TupleComparator(cmp);
+    }
+
+    /**
+     * Inserts all batches into a fresh skip list using numThreads worker
+     * threads and returns the elapsed wall-clock time in milliseconds. Timing
+     * starts only after the generator's queue is full, so data generation is
+     * excluded from the measurement.
+     */
+    @Override
+    public long runExperiment(DataGenThread dataGen, int numThreads) throws InterruptedException {
+        ConcurrentSkipListSet<ITupleReference> skipList = new ConcurrentSkipListSet<ITupleReference>(tupleCmp);
+        SkipListThread[] threads = new SkipListThread[numThreads];
+        // Each worker consumes an equal share of the batches.
+        int threadNumBatches = numBatches / numThreads;
+        for (int i = 0; i < numThreads; i++) {
+            threads[i] = new SkipListThread(dataGen, skipList, threadNumBatches, batchSize);
+        }
+        // Wait until the tupleBatchQueue is completely full.
+        while (dataGen.tupleBatchQueue.remainingCapacity() != 0) {
+            Thread.sleep(10);
+        }
+
+        long start = System.currentTimeMillis();
+        for (int i = 0; i < numThreads; i++) {
+            threads[i].start();
+        }
+        for (int i = 0; i < numThreads; i++) {
+            threads[i].join();
+        }
+        long end = System.currentTimeMillis();
+        long time = end - start;
+        return time;
+    }
+
+    @Override
+    public void init() throws Exception {
+        // No setup needed: the skip list is created per experiment run.
+    }
+
+    @Override
+    public void deinit() throws Exception {
+        // Nothing to release.
+    }
+
+    public void reset() throws Exception {
+        // Nothing to reset between runs.
+    }
+
+    /**
+     * Worker thread: copies each incoming tuple into a pre-allocated buffer
+     * and adds a pre-created tuple reference over that copy to the skip list.
+     */
+    public class SkipListThread extends Thread {
+        private final DataGenThread dataGen;
+        private final ConcurrentSkipListSet<ITupleReference> skipList;
+        private final int numBatches;
+        public final TypeAwareTupleWriterFactory tupleWriterFactory;
+        public final TypeAwareTupleWriter tupleWriter;
+        // All tuple references and their backing buffer are allocated up
+        // front so no allocation happens inside the timed loop.
+        public final TypeAwareTupleReference[] tuples;
+        public final ByteBuffer tupleBuf;
+
+        public SkipListThread(DataGenThread dataGen, ConcurrentSkipListSet<ITupleReference> skipList, int numBatches, int batchSize) {
+            this.dataGen = dataGen;
+            this.numBatches = numBatches;
+            this.skipList = skipList;
+            tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+            tupleWriter = (TypeAwareTupleWriter) tupleWriterFactory.createTupleWriter();
+            int numTuples = numBatches * batchSize;
+            tuples = new TypeAwareTupleReference[numTuples];
+            tupleBuf = ByteBuffer.allocate(numTuples * tupleSize);
+            for (int i = 0; i < numTuples; i++) {
+                tuples[i] = (TypeAwareTupleReference) tupleWriter.createTupleReference();
+            }
+        }
+
+        @Override
+        public void run() {
+            int tupleIndex = 0;
+            try {
+                for (int i = 0; i < numBatches; i++) {
+                    TupleBatch batch = dataGen.tupleBatchQueue.take();
+                    for (int j = 0; j < batch.size(); j++) {
+                        // Copy the tuple to the buffer and set the pre-created tuple ref.
+                        tupleWriter.writeTuple(batch.get(j), tupleBuf.array(), tupleIndex * tupleSize);
+                        tuples[tupleIndex].resetByTupleOffset(tupleBuf, tupleIndex * tupleSize);
+                        skipList.add(tuples[tupleIndex]);
+                        tupleIndex++;
+                    }
+                }
+            } catch (Exception e) {
+                System.out.println(tupleIndex);
+                e.printStackTrace();
+            }
+        }
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/IExperimentRunner.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/IExperimentRunner.java
new file mode 100644
index 0000000..3533d24
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/IExperimentRunner.java
@@ -0,0 +1,15 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.perf;
+
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.DataGenThread;
+
+/**
+ * Common interface for index micro-benchmark runners.
+ */
+public interface IExperimentRunner {
+    // Default bound on the number of outstanding tuple batches from a data
+    // generator.
+    public static int DEFAULT_MAX_OUTSTANDING = 100000;
+
+    /** One-time setup (buffer caches, files, index creation). */
+    public void init() throws Exception;
+
+    /**
+     * Consumes batches from the data generator with numThreads threads and
+     * returns the elapsed time in milliseconds.
+     */
+    public long runExperiment(DataGenThread dataGen, int numThreads) throws Exception;
+
+    /** Resets state between repeated runs. */
+    public void reset() throws Exception;
+
+    /** Tears down resources acquired by init(). */
+    public void deinit() throws Exception;
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/InMemoryBTreeRunner.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/InMemoryBTreeRunner.java
new file mode 100644
index 0000000..b6a5170
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/InMemoryBTreeRunner.java
@@ -0,0 +1,125 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.perf;
+
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTree;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.TupleBatch;
+import edu.uci.ics.hyracks.storage.am.lsmtree.impls.InMemoryBufferCache;
+import edu.uci.ics.hyracks.storage.am.lsmtree.impls.InMemoryFreePageManager;
+import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ICacheMemoryAllocator;
+
+public class InMemoryBTreeRunner extends Thread implements IExperimentRunner {
+ protected IBufferCache bufferCache;
+ protected int btreeFileId;
+
+ protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+ protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+ protected final static String sep = System.getProperty("file.separator");
+ protected String fileName;
+
+ protected final int numBatches;
+ protected BTree btree;
+
+ public InMemoryBTreeRunner(int numBatches, int pageSize, int numPages, ITypeTraits[] typeTraits, MultiComparator cmp) throws HyracksDataException, BTreeException {
+ this.numBatches = numBatches;
+ fileName = tmpDir + sep + simpleDateFormat.format(new Date());
+ init(pageSize, numPages, typeTraits, cmp);
+ }
+
+ protected void init(int pageSize, int numPages, ITypeTraits[] typeTraits, MultiComparator cmp) throws HyracksDataException, BTreeException {
+ ICacheMemoryAllocator allocator = new HeapBufferAllocator();
+ bufferCache = new InMemoryBufferCache(allocator, pageSize, numPages);
+ // Chose an aribtrary file id.
+ btreeFileId = 0;
+ TypeAwareTupleWriterFactory tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+ ITreeIndexFrameFactory leafFrameFactory = new BTreeNSMLeafFrameFactory(tupleWriterFactory);
+ ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(tupleWriterFactory);
+ ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+ IFreePageManager freePageManager = new InMemoryFreePageManager(bufferCache.getNumPages(), metaFrameFactory);
+ btree = new BTree(bufferCache, typeTraits.length, cmp, freePageManager, interiorFrameFactory, leafFrameFactory);
+ }
+
+ @Override
+ public long runExperiment(DataGenThread dataGen, int numThreads) throws Exception {
+ BTreeThread[] threads = new BTreeThread[numThreads];
+ int threadNumBatches = numBatches / numThreads;
+ for (int i = 0; i < numThreads; i++) {
+ threads[i] = new BTreeThread(dataGen, btree, threadNumBatches);
+ }
+ // Wait until the tupleBatchQueue is completely full.
+ while (dataGen.tupleBatchQueue.remainingCapacity() != 0) {
+ Thread.sleep(10);
+ }
+
+ long start = System.currentTimeMillis();
+ for (int i = 0; i < numThreads; i++) {
+ threads[i].start();
+ }
+ for (int i = 0; i < numThreads; i++) {
+ threads[i].join();
+ }
+ long end = System.currentTimeMillis();
+ long time = end - start;
+ return time;
+ }
+
+ @Override
+ public void init() throws Exception {
+ }
+
+ @Override
+ public void deinit() throws Exception {
+ bufferCache.closeFile(btreeFileId);
+ bufferCache.close();
+ }
+
+ @Override
+ public void reset() throws Exception {
+ btree.create(btreeFileId);
+ }
+
+ public class BTreeThread extends Thread {
+ private final DataGenThread dataGen;
+ private final int numBatches;
+ private final ITreeIndexAccessor indexAccessor;
+ public BTreeThread(DataGenThread dataGen, BTree btree, int numBatches) {
+ this.dataGen = dataGen;
+ this.numBatches = numBatches;
+ indexAccessor = btree.createAccessor();
+ }
+
+ @Override
+ public void run() {
+ try {
+ for (int i = 0; i < numBatches; i++) {
+ TupleBatch batch = dataGen.tupleBatchQueue.take();
+ for (int j = 0; j < batch.size(); j++) {
+ try {
+ indexAccessor.insert(batch.get(j));
+ } catch (TreeIndexException e) {
+ }
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/InMemorySortRunner.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/InMemorySortRunner.java
new file mode 100644
index 0000000..90c5c32
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/InMemorySortRunner.java
@@ -0,0 +1,138 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.perf;
+
+import java.nio.ByteBuffer;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.concurrent.ConcurrentSkipListSet;
+
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.data.accessors.ITupleReference;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleReference;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriter;
+import edu.uci.ics.hyracks.storage.am.common.tuples.TypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.TupleBatch;
+
+/**
+ * Experiment runner that loads all generated tuples into one flat in-memory
+ * buffer and measures the time to load and sort them with Collections.sort().
+ */
+public class InMemorySortRunner implements IExperimentRunner {
+    /** Adapts a MultiComparator to java.util.Comparator for Collections.sort(). */
+    public class TupleComparator implements Comparator<ITupleReference> {
+        private final MultiComparator cmp;
+
+        public TupleComparator(MultiComparator cmp) {
+            this.cmp = cmp;
+        }
+
+        @Override
+        public int compare(ITupleReference o1, ITupleReference o2) {
+            return cmp.compare(o1, o2);
+        }
+    }
+
+    private final TupleComparator tupleCmp;
+    private final int numBatches;
+    private final int batchSize;
+    private final int tupleSize;
+    private final ITypeTraits[] typeTraits;
+
+    private final TypeAwareTupleWriterFactory tupleWriterFactory;
+    private final TypeAwareTupleWriter tupleWriter;
+    // Pre-created tuple references; after loading, tuples.get(i) points at
+    // offset i * tupleSize in tupleBuf.
+    private final ArrayList<TypeAwareTupleReference> tuples;
+    // Flat buffer holding all serialized tuples back-to-back.
+    private final ByteBuffer tupleBuf;
+
+    public InMemorySortRunner(int numBatches, int batchSize, int tupleSize, ITypeTraits[] typeTraits, MultiComparator cmp) {
+        this.numBatches = numBatches;
+        this.tupleSize = tupleSize;
+        this.batchSize = batchSize;
+        this.typeTraits = typeTraits;
+        tupleCmp = new TupleComparator(cmp);
+        tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+        tupleWriter = (TypeAwareTupleWriter) tupleWriterFactory.createTupleWriter();
+        int numTuples = numBatches * batchSize;
+        // Presize to the known element count to avoid repeated growth.
+        tuples = new ArrayList<TypeAwareTupleReference>(numTuples);
+        tupleBuf = ByteBuffer.allocate(numTuples * tupleSize);
+        for (int i = 0; i < numTuples; i++) {
+            tuples.add((TypeAwareTupleReference) tupleWriter.createTupleReference());
+        }
+    }
+
+    @Override
+    public long runExperiment(DataGenThread dataGen, int numThreads) throws InterruptedException {
+        // Wait until the tupleBatchQueue is completely full so that data
+        // generation time is not counted in the measured time.
+        while (dataGen.tupleBatchQueue.remainingCapacity() != 0) {
+            Thread.sleep(10);
+        }
+
+        long start = System.currentTimeMillis();
+        int tupleIndex = 0;
+        for (int i = 0; i < numBatches; i++) {
+            TupleBatch batch = dataGen.tupleBatchQueue.take();
+            for (int j = 0; j < batch.size(); j++) {
+                // Copy the tuple to the buffer and set the pre-created tuple ref.
+                tupleWriter.writeTuple(batch.get(j), tupleBuf.array(), tupleIndex * tupleSize);
+                tuples.get(tupleIndex).resetByTupleOffset(tupleBuf, tupleIndex * tupleSize);
+                tupleIndex++;
+            }
+        }
+        // Perform the sort.
+        Collections.sort(tuples, tupleCmp);
+        long end = System.currentTimeMillis();
+        return end - start;
+    }
+
+    @Override
+    public void init() throws Exception {
+        // All setup happens in the constructor.
+    }
+
+    @Override
+    public void deinit() throws Exception {
+        // Nothing to release; buffers are garbage collected.
+    }
+
+    @Override
+    public void reset() throws Exception {
+        // Nothing to reset; the buffer is overwritten in place on each run.
+    }
+
+    // NOTE(review): this thread targets a ConcurrentSkipListSet, not the sort,
+    // and appears unused by this runner -- looks like a copy-paste leftover
+    // from ConcurrentSkipListRunner; confirm before removing.
+    public class SkipListThread extends Thread {
+        private final DataGenThread dataGen;
+        private final ConcurrentSkipListSet<ITupleReference> skipList;
+        private final int numBatches;
+        public final TypeAwareTupleWriterFactory tupleWriterFactory;
+        public final TypeAwareTupleWriter tupleWriter;
+        public final TypeAwareTupleReference[] tuples;
+        public final ByteBuffer tupleBuf;
+
+        public SkipListThread(DataGenThread dataGen, ConcurrentSkipListSet<ITupleReference> skipList, int numBatches, int batchSize) {
+            this.dataGen = dataGen;
+            this.numBatches = numBatches;
+            this.skipList = skipList;
+            tupleWriterFactory = new TypeAwareTupleWriterFactory(typeTraits);
+            tupleWriter = (TypeAwareTupleWriter) tupleWriterFactory.createTupleWriter();
+            int numTuples = numBatches * batchSize;
+            tuples = new TypeAwareTupleReference[numTuples];
+            tupleBuf = ByteBuffer.allocate(numTuples * tupleSize);
+            for (int i = 0; i < numTuples; i++) {
+                tuples[i] = (TypeAwareTupleReference) tupleWriter.createTupleReference();
+            }
+        }
+
+        @Override
+        public void run() {
+            int tupleIndex = 0;
+            try {
+                for (int i = 0; i < numBatches; i++) {
+                    TupleBatch batch = dataGen.tupleBatchQueue.take();
+                    for (int j = 0; j < batch.size(); j++) {
+                        // Copy the tuple to the buffer and set the pre-created tuple ref.
+                        tupleWriter.writeTuple(batch.get(j), tupleBuf.array(), tupleIndex * tupleSize);
+                        tuples[tupleIndex].resetByTupleOffset(tupleBuf, tupleIndex * tupleSize);
+                        skipList.add(tuples[tupleIndex]);
+                        tupleIndex++;
+                    }
+                }
+            } catch (InterruptedException e) {
+                // Restore the interrupt status instead of swallowing it.
+                Thread.currentThread().interrupt();
+            } catch (Exception e) {
+                // Include the failing tuple index to aid debugging.
+                System.out.println("Failed at tuple index: " + tupleIndex);
+                e.printStackTrace();
+            }
+        }
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/LSMTreeRunner.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/LSMTreeRunner.java
new file mode 100644
index 0000000..0d537b6
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/LSMTreeRunner.java
@@ -0,0 +1,143 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.perf;
+
+import java.io.File;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import edu.uci.ics.hyracks.api.context.IHyracksTaskContext;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.io.FileReference;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexAccessor;
+import edu.uci.ics.hyracks.storage.am.common.api.TreeIndexException;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.DataGenThread;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.TupleBatch;
+import edu.uci.ics.hyracks.storage.am.lsmtree.impls.InMemoryBufferCacheFactory;
+import edu.uci.ics.hyracks.storage.am.lsmtree.impls.LSMTree;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapProvider;
+import edu.uci.ics.hyracks.test.support.TestStorageManagerComponentHolder;
+import edu.uci.ics.hyracks.test.support.TestUtils;
+
+public class LSMTreeRunner implements IExperimentRunner {
+
+ private static final int MAX_OPEN_FILES = 10000;
+ private static final int HYRACKS_FRAME_SIZE = 128;
+
+ protected IHyracksTaskContext ctx;
+ protected IBufferCache bufferCache;
+ protected int lsmtreeFileId;
+
+ protected final static SimpleDateFormat simpleDateFormat = new SimpleDateFormat("ddMMyy-hhmmssSS");
+ protected final static String tmpDir = System.getProperty("java.io.tmpdir");
+ protected final static String sep = System.getProperty("file.separator");
+ protected final static String classDir = "/lsmtree/";
+ protected String fileName;
+
+ protected final int numBatches;
+ protected final LSMTree lsmtree;
+ protected IBufferCache memBufferCache;
+ private final int onDiskPageSize;
+ private final int onDiskNumPages;
+
+ public LSMTreeRunner(int numBatches, int inMemPageSize, int inMeNumPages, int onDiskPageSize, int onDiskNumPages, ITypeTraits[] typeTraits, MultiComparator cmp) throws HyracksDataException, BTreeException {
+ this.numBatches = numBatches;
+
+ this.onDiskPageSize = onDiskPageSize;
+ this.onDiskNumPages = onDiskNumPages;
+
+ fileName = tmpDir + classDir + sep + simpleDateFormat.format(new Date());
+ ctx = TestUtils.create(HYRACKS_FRAME_SIZE);
+
+
+ TestStorageManagerComponentHolder.init(this.onDiskPageSize, this.onDiskNumPages, MAX_OPEN_FILES);
+ bufferCache = TestStorageManagerComponentHolder.getBufferCache(ctx);
+ IFileMapProvider fmp = TestStorageManagerComponentHolder.getFileMapProvider(ctx);
+ FileReference file = new FileReference(new File(fileName));
+ bufferCache.createFile(file);
+ lsmtreeFileId = fmp.lookupFileId(file);
+ bufferCache.openFile(lsmtreeFileId);
+
+
+ // In Memory
+ InMemoryBufferCacheFactory InMemBufferCacheFactory = new InMemoryBufferCacheFactory(inMemPageSize, inMeNumPages);
+ memBufferCache = InMemBufferCacheFactory.createInMemoryBufferCache();
+
+ lsmtree = LSMTreeUtils.createLSMTree(memBufferCache, bufferCache, lsmtreeFileId, typeTraits, cmp.getComparators(), BTreeLeafFrameType.REGULAR_NSM, (IFileMapManager)fmp);
+ }
+ @Override
+ public void init() throws Exception {
+ }
+
+ @Override
+ public long runExperiment(DataGenThread dataGen, int numThreads) throws Exception {
+ LSMTreeThread[] threads = new LSMTreeThread[numThreads];
+ int threadNumBatches = numBatches / numThreads;
+ for (int i = 0; i < numThreads; i++) {
+ threads[i] = new LSMTreeThread(dataGen, lsmtree, threadNumBatches);
+ }
+ // Wait until the tupleBatchQueue is completely full.
+ while (dataGen.tupleBatchQueue.remainingCapacity() != 0) {
+ Thread.sleep(10);
+ }
+
+ long start = System.currentTimeMillis();
+ for (int i = 0; i < numThreads; i++) {
+ threads[i].start();
+ }
+ for (int i = 0; i < numThreads; i++) {
+ threads[i].join();
+ }
+ long end = System.currentTimeMillis();
+ long time = end - start;
+ return time;
+ }
+
+ @Override
+ public void reset() throws Exception {
+ lsmtree.create(lsmtreeFileId);
+ }
+
+ @Override
+ public void deinit() throws Exception {
+ bufferCache.closeFile(lsmtreeFileId);
+ bufferCache.close();
+ memBufferCache.closeFile(lsmtreeFileId);
+ memBufferCache.close();
+ }
+
+ public class LSMTreeThread extends Thread {
+ private final DataGenThread dataGen;
+ private final LSMTree lsmTree;
+ private final int numBatches;
+ private final ITreeIndexAccessor lsmTreeAccessor;
+ public LSMTreeThread(DataGenThread dataGen, LSMTree lsmTree, int numBatches) {
+ this.dataGen = dataGen;
+ this.lsmTree = lsmTree;
+ this.numBatches = numBatches;
+ lsmTreeAccessor = lsmTree.createAccessor();
+ }
+
+ @Override
+ public void run() {
+ try {
+ for (int i = 0; i < numBatches; i++) {
+ TupleBatch batch = dataGen.tupleBatchQueue.take();
+ for (int j = 0; j < batch.size(); j++) {
+ try {
+ lsmTreeAccessor.insert(batch.get(j));
+ } catch (TreeIndexException e) {
+ }
+ }
+ }
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+ }
+
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/LSMTreeUtils.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/LSMTreeUtils.java
new file mode 100644
index 0000000..22d006c
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/LSMTreeUtils.java
@@ -0,0 +1,46 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.perf;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.storage.am.btree.exceptions.BTreeException;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeLeafFrameType;
+import edu.uci.ics.hyracks.storage.am.btree.frames.BTreeNSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.util.BTreeUtils;
+import edu.uci.ics.hyracks.storage.am.common.api.IFreePageManager;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.api.ITreeIndexMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.frames.LIFOMetaDataFrameFactory;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.lsmtree.impls.BTreeFactory;
+import edu.uci.ics.hyracks.storage.am.lsmtree.impls.FreePageManagerFactory;
+import edu.uci.ics.hyracks.storage.am.lsmtree.impls.InMemoryFreePageManager;
+import edu.uci.ics.hyracks.storage.am.lsmtree.impls.LSMEntireTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.am.lsmtree.impls.LSMTree;
+import edu.uci.ics.hyracks.storage.am.lsmtree.impls.LSMTypeAwareTupleWriterFactory;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.IFileMapManager;
+
+/**
+ * Static helper for assembling an LSMTree instance for the perf experiments.
+ */
+public class LSMTreeUtils {
+
+    private LSMTreeUtils() {
+        // Utility class; prevent instantiation.
+    }
+
+    /**
+     * Wires together the in-memory and on-disk components of an LSMTree.
+     * The memCache backs the in-memory component; bufferCache backs the
+     * flushed on-disk BTrees created via bTreeFactory.
+     */
+    public static LSMTree createLSMTree(IBufferCache memCache, IBufferCache bufferCache, int lsmtreeFileId,
+            ITypeTraits[] typeTraits, IBinaryComparator[] cmps, BTreeLeafFrameType leafType,
+            IFileMapManager fileMapManager) throws BTreeException {
+        MultiComparator cmp = new MultiComparator(cmps);
+
+        // Separate tuple writer factories for inserts vs. deletes (the boolean
+        // flag distinguishes them -- see the insert/delete factory names).
+        LSMTypeAwareTupleWriterFactory insertTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, false);
+        LSMTypeAwareTupleWriterFactory deleteTupleWriterFactory = new LSMTypeAwareTupleWriterFactory(typeTraits, true);
+
+        ITreeIndexFrameFactory insertLeafFrameFactory = BTreeUtils.getLeafFrameFactory(insertTupleWriterFactory, leafType);
+        ITreeIndexFrameFactory deleteLeafFrameFactory = BTreeUtils.getLeafFrameFactory(deleteTupleWriterFactory, leafType);
+        ITreeIndexFrameFactory interiorFrameFactory = new BTreeNSMInteriorFrameFactory(insertTupleWriterFactory);
+        ITreeIndexMetaDataFrameFactory metaFrameFactory = new LIFOMetaDataFrameFactory();
+        IFreePageManager memFreePageManager = new InMemoryFreePageManager(memCache.getNumPages(), metaFrameFactory);
+
+        // For the flush mechanism: flushed components are written with the
+        // entire-tuple writer.
+        LSMEntireTupleWriterFactory flushTupleWriterFactory = new LSMEntireTupleWriterFactory(typeTraits);
+        ITreeIndexFrameFactory flushLeafFrameFactory = BTreeUtils.getLeafFrameFactory(flushTupleWriterFactory, leafType);
+        FreePageManagerFactory freePageManagerFactory = new FreePageManagerFactory(bufferCache, metaFrameFactory);
+        BTreeFactory bTreeFactory = new BTreeFactory(bufferCache, freePageManagerFactory, cmp, typeTraits.length,
+                interiorFrameFactory, flushLeafFrameFactory);
+
+        return new LSMTree(memCache, bufferCache, typeTraits.length, cmp, memFreePageManager,
+                interiorFrameFactory, insertLeafFrameFactory, deleteLeafFrameFactory, bTreeFactory, fileMapManager);
+    }
+}
diff --git a/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/PerfExperiment.java b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/PerfExperiment.java
new file mode 100644
index 0000000..ee0f1fd
--- /dev/null
+++ b/hyracks-storage-am-lsmtree/src/main/java/edu/uci/ics/hyracks/storage/am/lsmtree/perf/PerfExperiment.java
@@ -0,0 +1,158 @@
+package edu.uci.ics.hyracks.storage.am.lsmtree.perf;
+
+import java.io.FileOutputStream;
+import java.io.PrintStream;
+import java.util.Enumeration;
+import java.util.logging.Level;
+import java.util.logging.LogManager;
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparator;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.ITypeTraits;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.util.SerdeUtils;
+import edu.uci.ics.hyracks.storage.am.common.ophelpers.MultiComparator;
+import edu.uci.ics.hyracks.storage.am.lsmtree.datagen.DataGenThread;
+
+public class PerfExperiment {
+ public static void main(String[] args) throws Exception {
+ // Disable logging so we can better see the output times.
+ Enumeration<String> loggers = LogManager.getLogManager().getLoggerNames();
+ while(loggers.hasMoreElements()) {
+ String loggerName = loggers.nextElement();
+ Logger logger = LogManager.getLogManager().getLogger(loggerName);
+ logger.setLevel(Level.OFF);
+ }
+
+ //int numTuples = 1000;
+ //int batchSize = 100;
+ int numTuples = 100000; // 100K
+ //int numTuples = 1000000; // 1M
+ //int numTuples = 2000000; // 2M
+ //int numTuples = 3000000; // 3M
+ //int numTuples = 10000000; // 10M
+ //int numTuples = 20000000; // 20M
+ //int numTuples = 30000000; // 30M
+ //int numTuples = 40000000; // 40M
+ //int numTuples = 60000000; // 60M
+ //int numTuples = 100000000; // 100M
+ //int numTuples = 200000000; // 200M
+ int batchSize = 10000;
+ int numBatches = numTuples / batchSize;
+
+ ISerializerDeserializer[] fieldSerdes = new ISerializerDeserializer[] { IntegerSerializerDeserializer.INSTANCE };
+ ITypeTraits[] typeTraits = SerdeUtils.serdesToTypeTraits(fieldSerdes, 30);
+
+ FileOutputStream out;
+ PrintStream p;
+ out = new FileOutputStream("/tmp/testOutput.txt");
+ p = new PrintStream( out );
+
+
+ // Add 1 byte for the null flags.
+ // TODO: hide this in some method.
+ int tupleSize = 4 + 30 + 1;
+
+ IBinaryComparator[] cmps = SerdeUtils.serdesToComparators(fieldSerdes, fieldSerdes.length);
+ MultiComparator cmp = new MultiComparator(cmps);
+
+
+ //Test 30M Btree
+ try{
+ p.println ("Start for test 30M BTree");
+ }catch (Exception e){
+ System.err.println ("Error writing to file");
+ }
+
+
+ //int repeats = 1000;
+ int repeats = 1;
+ long[] times = new long[repeats];
+
+ int numThreads = 1;
+ for (int i = 0; i < repeats; i++) {
+ //ConcurrentSkipListRunner runner = new ConcurrentSkipListRunner(numBatches, batchSize, tupleSize, typeTraits, cmp);
+
+ //InMemoryBTreeRunner runner = new InMemoryBTreeRunner(numBatches, 8192, 100000, typeTraits, cmp);
+ //BTreeBulkLoadRunner runner = new BTreeBulkLoadRunner(numBatches, 8192, 100000, typeTraits, cmp, 1.0f);
+
+ //BTreeRunner runner = new BTreeRunner(numBatches, 8192, 100000, typeTraits, cmp);
+ //String btreeName = "071211";
+ //BTreeSearchRunner runner = new BTreeSearchRunner(btreeName, 10, numBatches, 8192, 25000, typeTraits, cmp);
+ LSMTreeRunner runner = new LSMTreeRunner(numBatches, 8192, 100, 8192, 250, typeTraits, cmp);
+ //LSMTreeSearchRunner runner = new LSMTreeSearchRunner(100000, numBatches, 8192, 24750, 8192, 250, typeTraits, cmp);
+ DataGenThread dataGen = new DataGenThread(numBatches, batchSize, 10, numThreads, fieldSerdes, 30, 50, false);
+ dataGen.start();
+ runner.reset();
+ times[i] = runner.runExperiment(dataGen, numThreads);
+ System.out.println("TIME " + i + ": " + times[i] + "ms");
+ try{
+ p.println ("TIME " + i + ": " + times[i] + "ms");
+ }catch (Exception e){
+ System.err.println ("Error writing to file");
+ }
+ runner.deinit();
+ }
+
+ long avgTime = 0;
+ for (int i = 0; i < repeats; i++) {
+ avgTime += times[i];
+ }
+
+ avgTime /= repeats;
+ System.out.println("AVG TIME: " + avgTime + "ms");
+ try{
+ p.println ("AVG TIME: " + avgTime + "ms");
+ }catch (Exception e){
+ System.err.println ("Error writing to file");
+ }
+
+
+
+/*
+ //Test 30M Btree
+ try{
+ p.println ("Start for test 40M LSMTree");
+ }catch (Exception e){
+ System.err.println ("Error writing to file");
+ }
+
+ numTuples = 40000000; // 40M
+ //numTuples = 1000000; // 100K
+ numBatches = numTuples / batchSize;
+ runner = new BTreeRunner(numBatches, 8192, 100000, typeTraits, cmp);
+
+ runner.init();
+ for (int i = 0; i < repeats; i++) {
+ // LSMTreeRunner runner = new LSMTreeRunner(numBatches, 8192, 100000, typeTraits, cmp);
+ DataGenThread dataGen = new DataGenThread(numBatches, batchSize, 10, numThreads, fieldSerdes, 30, 50, false);
+ dataGen.start();
+ runner.reset();
+ times[i] = runner.runExperiment(dataGen, numThreads);
+ System.out.println("TIME " + i + ": " + times[i] + "ms");
+ try{
+ p.println ("TIME " + i + ": " + times[i] + "ms");
+ }catch (Exception e){
+ System.err.println ("Error writing to file");
+ }
+ // runner.deinit();
+ }
+ runner.deinit();
+
+ avgTime = 0;
+ for (int i = 0; i < repeats; i++) {
+ avgTime += times[i];
+ }
+
+ avgTime /= repeats;
+ System.out.println("AVG TIME: " + avgTime + "ms");
+ try{
+ p.println ("AVG TIME: " + avgTime + "ms");
+ }catch (Exception e){
+ System.err.println ("Error writing to file");
+ }
+ */
+ p.close();
+ }
+}
diff --git a/pom.xml b/pom.xml
index 18c5cb0..68e8097 100644
--- a/pom.xml
+++ b/pom.xml
@@ -92,6 +92,7 @@
<module>hyracks-storage-am-common</module>
<module>hyracks-storage-am-btree</module>
<module>hyracks-storage-am-invertedindex</module>
+ <module>hyracks-storage-am-lsmtree</module>
<module>hyracks-storage-am-rtree</module>
<module>hyracks-test-support</module>
<module>hyracks-tests</module>