added btree example app
git-svn-id: https://hyracks.googlecode.com/svn/trunk@139 123451ca-8445-de46-9d55-352943316053
diff --git a/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml b/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml
new file mode 100644
index 0000000..edddd91
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeapp/pom.xml
@@ -0,0 +1,58 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+ <artifactId>btreeapp</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+
+ <parent>
+ <groupId>edu.uci.ics.hyracks.examples</groupId>
+ <artifactId>btree-example</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ </parent>
+
+ <build>
+ <plugins>
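+ <!-- copy all dependencies into target/application/lib; the assembly plugin below packages that directory into a zip -->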
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-dependency-plugin</artifactId>
+ <executions>
+ <execution>
+ <id>copy-dependencies</id>
+ <phase>package</phase>
+ <goals>
+ <goal>copy-dependencies</goal>
+ </goals>
+ <configuration>
+ <outputDirectory>target/application/lib</outputDirectory>
+ </configuration>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.2-beta-5</version>
+ <executions>
+ <execution>
+ <configuration>
+ <descriptors>
+ <descriptor>src/main/assembly/app-assembly.xml</descriptor>
+ </descriptors>
+ </configuration>
+ <phase>package</phase>
+ <goals>
+ <goal>attached</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+ <dependencies>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+ <artifactId>btreehelper</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ </dependencies>
+</project>
diff --git a/hyracks/hyracks-examples/btree-example/btreeapp/src/main/assembly/app-assembly.xml b/hyracks/hyracks-examples/btree-example/btreeapp/src/main/assembly/app-assembly.xml
new file mode 100644
index 0000000..43ace6c
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeapp/src/main/assembly/app-assembly.xml
@@ -0,0 +1,13 @@
+<assembly>
+ <id>app-assembly</id>
+ <formats>
+ <format>zip</format>
+ </formats>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <fileSets>
+ <fileSet>
+ <directory>target/application/lib</directory>
+ <outputDirectory>lib</outputDirectory>
+ </fileSet>
+ </fileSets>
+</assembly>
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml b/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
new file mode 100644
index 0000000..8587300
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/pom.xml
@@ -0,0 +1,86 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+ <artifactId>btreeclient</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+
+ <parent>
+ <groupId>edu.uci.ics.hyracks.examples</groupId>
+ <artifactId>btree-example</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ </parent>
+
+ <dependencies>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-dataflow-std</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-btree</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+ <artifactId>btreehelper</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ <type>jar</type>
+ <scope>compile</scope>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.0.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
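+ <!-- generate a launcher script (bin/btreebulkload) and a flat lib/ directory under target/appassembler; the assembly plugin below packages both into the binary zip -->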
+ <plugin>
+ <groupId>org.codehaus.mojo</groupId>
+ <artifactId>appassembler-maven-plugin</artifactId>
+ <executions>
+ <execution>
+ <configuration>
+ <programs>
+ <program>
+ <mainClass>edu.uci.ics.hyracks.examples.btree.client.PrimaryIndexBulkLoadExample</mainClass>
+ <name>btreebulkload</name>
+ </program>
+ </programs>
+ <repositoryLayout>flat</repositoryLayout>
+ <repositoryName>lib</repositoryName>
+ </configuration>
+ <phase>package</phase>
+ <goals>
+ <goal>assemble</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ <plugin>
+ <artifactId>maven-assembly-plugin</artifactId>
+ <version>2.2-beta-5</version>
+ <executions>
+ <execution>
+ <configuration>
+ <descriptors>
+ <descriptor>src/main/assembly/binary-assembly.xml</descriptor>
+ </descriptors>
+ </configuration>
+ <phase>package</phase>
+ <goals>
+ <goal>attached</goal>
+ </goals>
+ </execution>
+ </executions>
+ </plugin>
+ </plugins>
+ </build>
+</project>
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/assembly/binary-assembly.xml b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/assembly/binary-assembly.xml
new file mode 100644
index 0000000..0500499
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/assembly/binary-assembly.xml
@@ -0,0 +1,19 @@
+<assembly>
+ <id>binary-assembly</id>
+ <formats>
+ <format>zip</format>
+ <format>dir</format>
+ </formats>
+ <includeBaseDirectory>false</includeBaseDirectory>
+ <fileSets>
+ <fileSet>
+ <directory>target/appassembler/bin</directory>
+ <outputDirectory>bin</outputDirectory>
+ <fileMode>0755</fileMode>
+ </fileSet>
+ <fileSet>
+ <directory>target/appassembler/lib</directory>
+ <outputDirectory>lib</outputDirectory>
+ </fileSet>
+ </fileSets>
+</assembly>
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
new file mode 100644
index 0000000..2e57487
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/InsertPipelineExample.java
@@ -0,0 +1,173 @@
+package edu.uci.ics.hyracks.examples.btree.client;
+
+import java.util.UUID;
+
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.hyracks.api.client.HyracksRMIConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.AbsoluteLocationConstraint;
+import edu.uci.ics.hyracks.api.constraints.ExplicitPartitionConstraint;
+import edu.uci.ics.hyracks.api.constraints.LocationConstraint;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraint;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.hash.UTF8StringBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNHashPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.BufferCacheProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeInsertUpdateDeleteOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.impls.BTreeOp;
+
+// This example will insert tuples into the primary and secondary index using an insert pipeline
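+// (data generation -> hash partitioning -> primary-index insert -> secondary-index insert -> null sink)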
+
+public class InsertPipelineExample {
+ private static class Options {
+ @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
+ public String host;
+
+ @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1099)")
+ public int port = 1099;
+
+ @Option(name = "-app", usage = "Hyracks Application name", required = true)
+ public String app;
+
+ @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
+ public String ncs;
+
+ @Option(name = "-num-tuples", usage = "Total number of tuples to to be generated for insertion", required = true)
+ public int numTuples;
+
+ @Option(name = "-primary-name", usage = "B-Tree file name of primary index", required = true)
+ public String primaryBtreeName;
+
+ @Option(name = "-secondary-name", usage = "B-Tree file name of secondary index", required = true)
+ public String secondaryBtreeName;
+ }
+
+ public static void main(String[] args) throws Exception {
+ Options options = new Options();
+ CmdLineParser parser = new CmdLineParser(options);
+ parser.parseArgument(args);
+
+ IHyracksClientConnection hcc = new HyracksRMIConnection(options.host, options.port);
+
+ JobSpecification job = createJob(options);
+
+ long start = System.currentTimeMillis();
+ UUID jobId = hcc.createJob(options.app, job);
+ hcc.start(jobId);
+ hcc.waitForCompletion(jobId);
+ long end = System.currentTimeMillis();
+ System.err.println(start + " " + end + " " + (end - start));
+ }
+
+ private static JobSpecification createJob(Options options) {
+
+ JobSpecification spec = new JobSpecification();
+
+ String[] splitNCs = options.ncs.split(",");
+
+ // schema of tuples to be generated: 5 fields with string, string, int, int, string
+ // we will use field-index 2 as the primary key for insertion into the primary index
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ UTF8StringSerializerDeserializer.INSTANCE, // this field will not go into B-Tree
+ UTF8StringSerializerDeserializer.INSTANCE, // we will use this as payload
+ IntegerSerializerDeserializer.INSTANCE, // we will use this field as key
+ IntegerSerializerDeserializer.INSTANCE, // we will use this as payload
+ UTF8StringSerializerDeserializer.INSTANCE // we will use this as payload
+ });
+
+ // generate numRecords records with field 2 being unique, integer values in [0, 100000], strings with a max length of 10 characters, and random seed 100
+ DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0, 100000, 10, 100);
+ // run data generator on first nodecontroller given
+ PartitionConstraint dataGenConstraint = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(splitNCs[0]) });
+ dataGen.setPartitionConstraint(dataGenConstraint);
+
+ // create factories and providers for B-Trees
+ IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory();
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory();
+ IBufferCacheProvider bufferCacheProvider = BufferCacheProvider.INSTANCE;
+ IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
+ IFileMappingProviderProvider fileMappingProviderProvider = FileMappingProviderProvider.INSTANCE;
+
+ // prepare insertion into primary index
+ // tuples to be put into B-Tree shall have 4 fields
+ int primaryFieldCount = 4;
+ // the B-Tree expects its keyfields to be at the front of its input tuple
+ int[] primaryFieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input tuple to field 0 of B-Tree tuple, etc.
+ // comparator factories for primary index
+ IBinaryComparatorFactory[] primaryComparatorFactories = new IBinaryComparatorFactory[1];
+ primaryComparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+ // create operator descriptor
+ BTreeInsertUpdateDeleteOperatorDescriptor primaryInsert = new BTreeInsertUpdateDeleteOperatorDescriptor(spec, recDesc, bufferCacheProvider, btreeRegistryProvider, options.primaryBtreeName, fileMappingProviderProvider, interiorFrameFactory, leafFrameFactory, primaryFieldCount, primaryComparatorFactories, primaryFieldPermutation, BTreeOp.BTO_INSERT);
+ PartitionConstraint primaryInsertConstraint = createPartitionConstraint(splitNCs);
+ primaryInsert.setPartitionConstraint(primaryInsertConstraint);
+
+ // prepare insertion into secondary index
+ // tuples to be put into B-Tree shall have 2 fields
+ int secondaryFieldCount = 2;
+ // the B-Tree expects its keyfields to be at the front of its input tuple
+ int[] secondaryFieldPermutation = { 1, 2 };
+ // comparator factories for secondary index
+ IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[2];
+ secondaryComparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+ secondaryComparatorFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
+ // create operator descriptor
+ BTreeInsertUpdateDeleteOperatorDescriptor secondaryInsert = new BTreeInsertUpdateDeleteOperatorDescriptor(spec, recDesc, bufferCacheProvider, btreeRegistryProvider, options.secondaryBtreeName, fileMappingProviderProvider, interiorFrameFactory, leafFrameFactory, secondaryFieldCount, secondaryComparatorFactories, secondaryFieldPermutation, BTreeOp.BTO_INSERT);
+ PartitionConstraint secondaryInsertConstraint = createPartitionConstraint(splitNCs);
+ secondaryInsert.setPartitionConstraint(secondaryInsertConstraint);
+
+ // end the insert pipeline at this sink operator
+ NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
+ PartitionConstraint nullSinkPartitionConstraint = createPartitionConstraint(splitNCs);
+ nullSink.setPartitionConstraint(nullSinkPartitionConstraint);
+
+ // distribute the records from the datagen via hashing to the insert ops
+ IBinaryHashFunctionFactory[] hashFactories = new IBinaryHashFunctionFactory[1];
+ hashFactories[0] = UTF8StringBinaryHashFunctionFactory.INSTANCE;
+ IConnectorDescriptor hashConn = new MToNHashPartitioningConnectorDescriptor(spec,
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, hashFactories));
+
+ // connect the ops
+
+ spec.connect(hashConn, dataGen, 0, primaryInsert, 0);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), primaryInsert, 0, secondaryInsert, 0);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), secondaryInsert, 0, nullSink, 0);
+
+ spec.addRoot(nullSink);
+
+ return spec;
+ }
+
+ private static PartitionConstraint createPartitionConstraint(String[] splitNCs) {
+ LocationConstraint[] lConstraints = new LocationConstraint[splitNCs.length];
+ for (int i = 0; i < splitNCs.length; ++i) {
+ lConstraints[i] = new AbsoluteLocationConstraint(splitNCs[i]);
+ }
+ return new ExplicitPartitionConstraint(lConstraints);
+ }
+}
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
new file mode 100644
index 0000000..9f34737
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexBulkLoadExample.java
@@ -0,0 +1,169 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.examples.btree.client;
+
+import java.util.UUID;
+
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.hyracks.api.client.HyracksRMIConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.AbsoluteLocationConstraint;
+import edu.uci.ics.hyracks.api.constraints.ExplicitPartitionConstraint;
+import edu.uci.ics.hyracks.api.constraints.LocationConstraint;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraint;
+import edu.uci.ics.hyracks.api.dataflow.IConnectorDescriptor;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.hash.UTF8StringBinaryHashFunctionFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.partition.FieldHashPartitionComputerFactory;
+import edu.uci.ics.hyracks.dataflow.std.connectors.MToNHashPartitioningConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.BufferCacheProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.DataGenOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
+
+// This example will load a primary index from randomly generated data
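+// (data generation -> hash partitioning -> external sort on the key field -> B-Tree bulk load)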
+
+public class PrimaryIndexBulkLoadExample {
+ private static class Options {
+ @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
+ public String host;
+
+ @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1099)")
+ public int port = 1099;
+
+ @Option(name = "-app", usage = "Hyracks Application name", required = true)
+ public String app;
+
+ @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
+ public String ncs;
+
+ @Option(name = "-num-tuples", usage = "Total number of tuples to to be generated for loading", required = true)
+ public int numTuples;
+
+ @Option(name = "-btreename", usage = "B-Tree file name", required = true)
+ public String btreeName;
+
+ @Option(name = "-sortbuffer-size", usage = "Sort buffer size in frames (default: 32768)", required = false)
+ public int sbSize = 32768;
+ }
+
+ public static void main(String[] args) throws Exception {
+ Options options = new Options();
+ CmdLineParser parser = new CmdLineParser(options);
+ parser.parseArgument(args);
+
+ IHyracksClientConnection hcc = new HyracksRMIConnection(options.host, options.port);
+
+ JobSpecification job = createJob(options);
+
+ long start = System.currentTimeMillis();
+ UUID jobId = hcc.createJob(options.app, job);
+ hcc.start(jobId);
+ hcc.waitForCompletion(jobId);
+ long end = System.currentTimeMillis();
+ System.err.println(start + " " + end + " " + (end - start));
+ }
+
+ private static JobSpecification createJob(Options options) {
+
+ JobSpecification spec = new JobSpecification();
+
+ String[] splitNCs = options.ncs.split(",");
+
+ // schema of tuples to be generated: 5 fields with string, string, int, int, string
+ // we will use field-index 2 as primary key to fill a clustered index
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ UTF8StringSerializerDeserializer.INSTANCE, // this field will not go into B-Tree
+ UTF8StringSerializerDeserializer.INSTANCE, // we will use this as payload
+ IntegerSerializerDeserializer.INSTANCE, // we will use this field as key
+ IntegerSerializerDeserializer.INSTANCE, // we will use this as payload
+ UTF8StringSerializerDeserializer.INSTANCE // we will use this as payload
+ });
+
+ // generate numRecords records with field 2 being unique, integer values in [0, 100000], strings with a max length of 10 characters, and random seed 50
+ DataGenOperatorDescriptor dataGen = new DataGenOperatorDescriptor(spec, recDesc, options.numTuples, 2, 0, 100000, 10, 50);
+ // run data generator on first nodecontroller given
+ PartitionConstraint dataGenConstraint = new ExplicitPartitionConstraint(new LocationConstraint[] { new AbsoluteLocationConstraint(splitNCs[0]) });
+ dataGen.setPartitionConstraint(dataGenConstraint);
+
+ // sort the tuples as preparation for bulk load
+ // fields to sort on
+ int[] sortFields = { 2 };
+ // comparators for sort fields
+ IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+ comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+ ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields, comparatorFactories, recDesc);
+ PartitionConstraint sorterConstraint = createPartitionConstraint(splitNCs);
+ sorter.setPartitionConstraint(sorterConstraint);
+
+ // create factories and providers for B-Tree
+ IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory();
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory();
+ IBufferCacheProvider bufferCacheProvider = BufferCacheProvider.INSTANCE;
+ IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
+ IFileMappingProviderProvider fileMappingProviderProvider = FileMappingProviderProvider.INSTANCE;
+
+ // tuples to be put into B-Tree shall have 4 fields
+ int fieldCount = 4;
+ // the B-Tree expects its keyfields to be at the front of its input tuple
+ int[] fieldPermutation = { 2, 1, 3, 4 }; // map field 2 of input tuple to field 0 of B-Tree tuple, etc.
+ BTreeBulkLoadOperatorDescriptor btreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(spec,
+ bufferCacheProvider, btreeRegistryProvider, options.btreeName, fileMappingProviderProvider, interiorFrameFactory,
+ leafFrameFactory, fieldCount, comparatorFactories, fieldPermutation, 0.7f);
+ PartitionConstraint bulkLoadConstraint = createPartitionConstraint(splitNCs);
+ btreeBulkLoad.setPartitionConstraint(bulkLoadConstraint);
+
+ // distribute the records from the datagen via hashing to the bulk load ops
+ IBinaryHashFunctionFactory[] hashFactories = new IBinaryHashFunctionFactory[1];
+ hashFactories[0] = UTF8StringBinaryHashFunctionFactory.INSTANCE;
+ IConnectorDescriptor hashConn = new MToNHashPartitioningConnectorDescriptor(spec,
+ new FieldHashPartitionComputerFactory(new int[] { 0 }, hashFactories));
+
+ spec.connect(hashConn, dataGen, 0, sorter, 0);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
+
+ spec.addRoot(btreeBulkLoad);
+
+ return spec;
+ }
+
+ private static PartitionConstraint createPartitionConstraint(String[] splitNCs) {
+ LocationConstraint[] lConstraints = new LocationConstraint[splitNCs.length];
+ for (int i = 0; i < splitNCs.length; ++i) {
+ lConstraints[i] = new AbsoluteLocationConstraint(splitNCs[i]);
+ }
+ return new ExplicitPartitionConstraint(lConstraints);
+ }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
new file mode 100644
index 0000000..c84ed4e
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/PrimaryIndexSearchExample.java
@@ -0,0 +1,142 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.examples.btree.client;
+
+import java.util.UUID;
+
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.hyracks.api.client.HyracksRMIConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.AbsoluteLocationConstraint;
+import edu.uci.ics.hyracks.api.constraints.ExplicitPartitionConstraint;
+import edu.uci.ics.hyracks.api.constraints.LocationConstraint;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraint;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.BufferCacheProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.ITupleReferenceFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.NullTupleReferenceFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
+
+// This example will perform an ordered scan on the primary index
+// i.e. a range-search for [-infinity, +infinity]
+
+public class PrimaryIndexSearchExample {
+ private static class Options {
+ @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
+ public String host;
+
+ @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1099)")
+ public int port = 1099;
+
+ @Option(name = "-app", usage = "Hyracks Application name", required = true)
+ public String app;
+
+ @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
+ public String ncs;
+
+ @Option(name = "-btreename", usage = "B-Tree file name to search", required = true)
+ public String btreeName;
+ }
+
+ public static void main(String[] args) throws Exception {
+ Options options = new Options();
+ CmdLineParser parser = new CmdLineParser(options);
+ parser.parseArgument(args);
+
+ IHyracksClientConnection hcc = new HyracksRMIConnection(options.host, options.port);
+
+ JobSpecification job = createJob(options);
+
+ long start = System.currentTimeMillis();
+ UUID jobId = hcc.createJob(options.app, job);
+ hcc.start(jobId);
+ hcc.waitForCompletion(jobId);
+ long end = System.currentTimeMillis();
+ System.err.println(start + " " + end + " " + (end - start));
+ }
+
+ private static JobSpecification createJob(Options options) {
+
+ JobSpecification spec = new JobSpecification();
+
+ String[] splitNCs = options.ncs.split(",");
+
+ // create factories and providers for B-Tree
+ IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory();
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory();
+ IBufferCacheProvider bufferCacheProvider = BufferCacheProvider.INSTANCE;
+ IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
+ IFileMappingProviderProvider fileMappingProviderProvider = FileMappingProviderProvider.INSTANCE;
+
+ // schema of tuples coming out of primary index
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ IntegerSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE,
+ });
+
+ // comparators for btree
+ IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+ comparatorFactories[0] = IntegerBinaryComparatorFactory.INSTANCE;
+
+ // build search key factories
+ ITupleReferenceFactory[] searchKeys = new ITupleReferenceFactory[2];
+ searchKeys[0] = new NullTupleReferenceFactory(); // equivalent to -infinity
+ searchKeys[1] = new NullTupleReferenceFactory(); // equivalent to +infinity
+
+ BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, bufferCacheProvider, btreeRegistryProvider, options.btreeName, fileMappingProviderProvider, interiorFrameFactory, leafFrameFactory, recDesc.getFields().length, comparatorFactories, true, searchKeys, comparatorFactories.length);
+ PartitionConstraint btreeSearchConstraint = createPartitionConstraint(splitNCs);
+ btreeSearchOp.setPartitionConstraint(btreeSearchConstraint);
+
+ // have each node print the results of its respective B-Tree
+ PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
+ PartitionConstraint printerConstraint = createPartitionConstraint(splitNCs);
+ printer.setPartitionConstraint(printerConstraint);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), btreeSearchOp, 0, printer, 0);
+
+ spec.addRoot(printer);
+
+ return spec;
+ }
+
+ private static PartitionConstraint createPartitionConstraint(String[] splitNCs) {
+ LocationConstraint[] lConstraints = new LocationConstraint[splitNCs.length];
+ for (int i = 0; i < splitNCs.length; ++i) {
+ lConstraints[i] = new AbsoluteLocationConstraint(splitNCs[i]);
+ }
+ return new ExplicitPartitionConstraint(lConstraints);
+ }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
new file mode 100644
index 0000000..a39126d
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexBulkLoadExample.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.examples.btree.client;
+
+import java.util.UUID;
+
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.hyracks.api.client.HyracksRMIConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.AbsoluteLocationConstraint;
+import edu.uci.ics.hyracks.api.constraints.ExplicitPartitionConstraint;
+import edu.uci.ics.hyracks.api.constraints.LocationConstraint;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraint;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.IntegerBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.sort.ExternalSortOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.BufferCacheProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeBulkLoadOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeDiskOrderScanOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
+
+// This example will load a secondary index with <key, primary-index key> pairs
+// We require an existing primary index built with PrimaryIndexBulkLoadExample
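+// (disk-order scan of the primary index -> external sort on <secondary key, primary key> -> B-Tree bulk load)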
+
+public class SecondaryIndexBulkLoadExample {
+ private static class Options {
+ @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
+ public String host;
+
+ @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1099)")
+ public int port = 1099;
+
+ @Option(name = "-app", usage = "Hyracks Application name", required = true)
+ public String app;
+
+ @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
+ public String ncs;
+
+ @Option(name = "-primary-btreename", usage = "Name of primary-index B-Tree to load from", required = true)
+ public String primaryBtreeName;
+
+ @Option(name = "-btreename", usage = "B-Tree file name for secondary index to be built", required = true)
+ public String btreeName;
+
+ @Option(name = "-sortbuffer-size", usage = "Sort buffer size in frames (default: 32768)", required = false)
+ public int sbSize = 32768;
+ }
+
+ public static void main(String[] args) throws Exception {
+ Options options = new Options();
+ CmdLineParser parser = new CmdLineParser(options);
+ parser.parseArgument(args);
+
+ IHyracksClientConnection hcc = new HyracksRMIConnection(options.host, options.port);
+
+ JobSpecification job = createJob(options);
+
+ long start = System.currentTimeMillis();
+ UUID jobId = hcc.createJob(options.app, job);
+ hcc.start(jobId);
+ hcc.waitForCompletion(jobId);
+ long end = System.currentTimeMillis();
+ System.err.println(start + " " + end + " " + (end - start));
+ }
+
+ private static JobSpecification createJob(Options options) {
+
+ JobSpecification spec = new JobSpecification();
+
+ String[] splitNCs = options.ncs.split(",");
+
+ // schema of tuples that we are retrieving from the primary index
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ IntegerSerializerDeserializer.INSTANCE, // we will use this as payload in secondary index
+ UTF8StringSerializerDeserializer.INSTANCE, // we will use this as key in secondary index
+ IntegerSerializerDeserializer.INSTANCE,
+ UTF8StringSerializerDeserializer.INSTANCE
+ });
+
+ // create factories and providers for B-Tree(s)
+ IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory();
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory();
+ IBufferCacheProvider bufferCacheProvider = BufferCacheProvider.INSTANCE;
+ IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
+ IFileMappingProviderProvider fileMappingProviderProvider = FileMappingProviderProvider.INSTANCE;
+
+ // use a disk-order scan to read primary index
+ BTreeDiskOrderScanOperatorDescriptor btreeScanOp = new BTreeDiskOrderScanOperatorDescriptor(spec, recDesc, bufferCacheProvider, btreeRegistryProvider, options.primaryBtreeName, fileMappingProviderProvider, interiorFrameFactory, leafFrameFactory, recDesc.getFields().length);
+ PartitionConstraint scanPartitionConstraint = createPartitionConstraint(splitNCs);
+ btreeScanOp.setPartitionConstraint(scanPartitionConstraint);
+
+ // sort the tuples as preparation for bulk load into secondary index
+ // fields to sort on
+ int[] sortFields = { 1, 0 };
+ // comparators for sort fields
+ IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[2];
+ comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+ comparatorFactories[1] = IntegerBinaryComparatorFactory.INSTANCE;
+ ExternalSortOperatorDescriptor sorter = new ExternalSortOperatorDescriptor(spec, options.sbSize, sortFields, comparatorFactories, recDesc);
+ PartitionConstraint sorterConstraint = createPartitionConstraint(splitNCs);
+ sorter.setPartitionConstraint(sorterConstraint);
+
+ // tuples to be put into B-Tree shall have 2 fields
+ int fieldCount = 2;
+ // the B-Tree expects its keyfields to be at the front of its input tuple
+ int[] fieldPermutation = { 1, 0 };
+ BTreeBulkLoadOperatorDescriptor btreeBulkLoad = new BTreeBulkLoadOperatorDescriptor(spec,
+ bufferCacheProvider, btreeRegistryProvider, options.btreeName, fileMappingProviderProvider, interiorFrameFactory,
+ leafFrameFactory, fieldCount, comparatorFactories, fieldPermutation, 0.7f);
+ PartitionConstraint bulkLoadConstraint = createPartitionConstraint(splitNCs);
+ btreeBulkLoad.setPartitionConstraint(bulkLoadConstraint);
+
+ // connect the ops
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), btreeScanOp, 0, sorter, 0);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), sorter, 0, btreeBulkLoad, 0);
+
+ spec.addRoot(btreeBulkLoad);
+
+ return spec;
+ }
+
+ private static PartitionConstraint createPartitionConstraint(String[] splitNCs) {
+ LocationConstraint[] lConstraints = new LocationConstraint[splitNCs.length];
+ for (int i = 0; i < splitNCs.length; ++i) {
+ lConstraints[i] = new AbsoluteLocationConstraint(splitNCs[i]);
+ }
+ return new ExplicitPartitionConstraint(lConstraints);
+ }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
new file mode 100644
index 0000000..cbce6f6
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreeclient/src/main/java/edu/uci/ics/hyracks/examples/btree/client/SecondaryIndexSearchExample.java
@@ -0,0 +1,176 @@
+/*
+ * Copyright 2009-2010 by The Regents of the University of California
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * you may obtain a copy of the License from
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package edu.uci.ics.hyracks.examples.btree.client;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+import java.util.UUID;
+
+import org.kohsuke.args4j.CmdLineParser;
+import org.kohsuke.args4j.Option;
+
+import edu.uci.ics.hyracks.api.client.HyracksRMIConnection;
+import edu.uci.ics.hyracks.api.client.IHyracksClientConnection;
+import edu.uci.ics.hyracks.api.constraints.AbsoluteLocationConstraint;
+import edu.uci.ics.hyracks.api.constraints.ExplicitPartitionConstraint;
+import edu.uci.ics.hyracks.api.constraints.LocationConstraint;
+import edu.uci.ics.hyracks.api.constraints.PartitionConstraint;
+import edu.uci.ics.hyracks.api.context.IHyracksContext;
+import edu.uci.ics.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import edu.uci.ics.hyracks.api.dataflow.value.ISerializerDeserializer;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.control.nc.runtime.HyracksContext;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.data.comparators.UTF8StringBinaryComparatorFactory;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.misc.PrinterOperatorDescriptor;
+import edu.uci.ics.hyracks.examples.btree.helper.BTreeRegistryProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.BufferCacheProvider;
+import edu.uci.ics.hyracks.examples.btree.helper.FileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.api.IBTreeLeafFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeSearchOperatorDescriptor;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.FrameTupleReferenceFactory;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.ITupleReferenceFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMInteriorFrameFactory;
+import edu.uci.ics.hyracks.storage.am.btree.frames.NSMLeafFrameFactory;
+
+// This example will perform range search on the secondary index
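+// using the hard-coded key range ["a", "f"] built below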
+
+public class SecondaryIndexSearchExample {
+ private static class Options {
+ @Option(name = "-host", usage = "Hyracks Cluster Controller Host name", required = true)
+ public String host;
+
+ @Option(name = "-port", usage = "Hyracks Cluster Controller Port (default: 1099)")
+ public int port = 1099;
+
+ @Option(name = "-app", usage = "Hyracks Application name", required = true)
+ public String app;
+
+ @Option(name = "-target-ncs", usage = "Comma separated list of node-controller names to use", required = true)
+ public String ncs;
+
+ @Option(name = "-btreename", usage = "B-Tree file name to search", required = true)
+ public String btreeName;
+ }
+
+ public static void main(String[] args) throws Exception {
+ Options options = new Options();
+ CmdLineParser parser = new CmdLineParser(options);
+ parser.parseArgument(args);
+
+ IHyracksClientConnection hcc = new HyracksRMIConnection(options.host, options.port);
+
+ JobSpecification job = createJob(options);
+
+ long start = System.currentTimeMillis();
+ UUID jobId = hcc.createJob(options.app, job);
+ hcc.start(jobId);
+ hcc.waitForCompletion(jobId);
+ long end = System.currentTimeMillis();
+ System.err.println(start + " " + end + " " + (end - start));
+ }
+
+ private static JobSpecification createJob(Options options) throws HyracksDataException {
+
+ JobSpecification spec = new JobSpecification();
+
+ String[] splitNCs = options.ncs.split(",");
+
+ // create factories and providers for B-Tree
+ IBTreeInteriorFrameFactory interiorFrameFactory = new NSMInteriorFrameFactory();
+ IBTreeLeafFrameFactory leafFrameFactory = new NSMLeafFrameFactory();
+ IBufferCacheProvider bufferCacheProvider = BufferCacheProvider.INSTANCE;
+ IBTreeRegistryProvider btreeRegistryProvider = BTreeRegistryProvider.INSTANCE;
+ IFileMappingProviderProvider fileMappingProviderProvider = FileMappingProviderProvider.INSTANCE;
+
+ // schema of tuples coming out of secondary index
+ RecordDescriptor recDesc = new RecordDescriptor(new ISerializerDeserializer[] {
+ UTF8StringSerializerDeserializer.INSTANCE,
+ IntegerSerializerDeserializer.INSTANCE
+ });
+
+ // comparators for btree, note that we only need a comparator for the non-unique key
+ // i.e. we will have a range condition on the first field only (implying [-infinity, +infinity] for the second field)
+ IBinaryComparatorFactory[] comparatorFactories = new IBinaryComparatorFactory[1];
+ comparatorFactories[0] = UTF8StringBinaryComparatorFactory.INSTANCE;
+
+
+ // build search keys (which must be of type ITupleReference)
+ // put the search keys into a frame and create tuple-reference factories
+ IHyracksContext ctx = new HyracksContext(32768); // WARNING: make sure frame size is same as on NCs
+ ByteBuffer keyFrame = ctx.getResourceManager().allocateFrame();
+ FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ appender.reset(keyFrame, true);
+ ArrayTupleBuilder tb = new ArrayTupleBuilder(comparatorFactories.length);
+ DataOutput dos = tb.getDataOutput();
+
+ ISerializerDeserializer[] keyRecDescSers = { UTF8StringSerializerDeserializer.INSTANCE };
+ RecordDescriptor keyRecDesc = new RecordDescriptor(keyRecDescSers);
+
+ // build low key
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("a", dos);
+ tb.addFieldEndOffset();
+
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ // build high key
+ tb.reset();
+ UTF8StringSerializerDeserializer.INSTANCE.serialize("f", dos);
+ tb.addFieldEndOffset();
+
+ appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize());
+
+ // build search key factories
+ ITupleReferenceFactory[] searchKeys = new ITupleReferenceFactory[2];
+ // the low key is tuple 0 in the keyFrame
+ searchKeys[0] = new FrameTupleReferenceFactory(keyFrame.array(), 0, keyRecDesc);
+ // the high key is tuple 1 in the keyFrame
+ searchKeys[1] = new FrameTupleReferenceFactory(keyFrame.array(), 1, keyRecDesc);
+
+ BTreeSearchOperatorDescriptor btreeSearchOp = new BTreeSearchOperatorDescriptor(spec, recDesc, bufferCacheProvider, btreeRegistryProvider, options.btreeName, fileMappingProviderProvider, interiorFrameFactory, leafFrameFactory, recDesc.getFields().length, comparatorFactories, true, searchKeys, comparatorFactories.length);
+ PartitionConstraint btreeSearchConstraint = createPartitionConstraint(splitNCs);
+ btreeSearchOp.setPartitionConstraint(btreeSearchConstraint);
+
+ // have each node print the results of its respective B-Tree
+ PrinterOperatorDescriptor printer = new PrinterOperatorDescriptor(spec);
+ PartitionConstraint printerConstraint = createPartitionConstraint(splitNCs);
+ printer.setPartitionConstraint(printerConstraint);
+
+ spec.connect(new OneToOneConnectorDescriptor(spec), btreeSearchOp, 0, printer, 0);
+
+ spec.addRoot(printer);
+
+ return spec;
+ }
+
+ private static PartitionConstraint createPartitionConstraint(String[] splitNCs) {
+ LocationConstraint[] lConstraints = new LocationConstraint[splitNCs.length];
+ for (int i = 0; i < splitNCs.length; ++i) {
+ lConstraints[i] = new AbsoluteLocationConstraint(splitNCs[i]);
+ }
+ return new ExplicitPartitionConstraint(lConstraints);
+ }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml b/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
new file mode 100644
index 0000000..cc55d4a
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/pom.xml
@@ -0,0 +1,46 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.hyracks.examples.btree</groupId>
+ <artifactId>btreehelper</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+
+ <parent>
+ <groupId>edu.uci.ics.hyracks.examples</groupId>
+ <artifactId>btree-example</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ </parent>
+
+ <dependencies>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-dataflow-std</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-storage-am-btree</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ <dependency>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-api</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ <scope>compile</scope>
+ </dependency>
+ </dependencies>
+ <build>
+ <plugins>
+ <plugin>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-compiler-plugin</artifactId>
+ <version>2.0.2</version>
+ <configuration>
+ <source>1.6</source>
+ <target>1.6</target>
+ </configuration>
+ </plugin>
+ </plugins>
+ </build>
+</project>
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BTreeRegistryProvider.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BTreeRegistryProvider.java
new file mode 100644
index 0000000..4c3c7524
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BTreeRegistryProvider.java
@@ -0,0 +1,19 @@
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeRegistry;
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBTreeRegistryProvider;
+
+public class BTreeRegistryProvider implements IBTreeRegistryProvider {
+
+ private static final long serialVersionUID = 1L;
+
+ public static final BTreeRegistryProvider INSTANCE = new BTreeRegistryProvider();
+
+ private BTreeRegistryProvider() {
+ }
+
+ @Override
+ public BTreeRegistry getBTreeRegistry() {
+ return RuntimeContext.getInstance().getBTreeRegistry();
+ }
+}
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BufferCacheProvider.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BufferCacheProvider.java
new file mode 100644
index 0000000..db1368d
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/BufferCacheProvider.java
@@ -0,0 +1,24 @@
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IBufferCacheProvider;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.FileManager;
+
+public class BufferCacheProvider implements IBufferCacheProvider {
+ private static final long serialVersionUID = 1L;
+
+ public static final BufferCacheProvider INSTANCE = new BufferCacheProvider();
+
+ private BufferCacheProvider() {
+ }
+
+ @Override
+ public IBufferCache getBufferCache() {
+ return RuntimeContext.getInstance().getBufferCache();
+ }
+
+ @Override
+ public FileManager getFileManager() {
+ return RuntimeContext.getInstance().getFileManager();
+ }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
new file mode 100644
index 0000000..0ee9210
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/DataGenOperatorDescriptor.java
@@ -0,0 +1,148 @@
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import java.io.DataOutput;
+import java.nio.ByteBuffer;
+import java.util.HashSet;
+import java.util.Random;
+
+import edu.uci.ics.hyracks.api.context.IHyracksContext;
+import edu.uci.ics.hyracks.api.dataflow.IOperatorNodePushable;
+import edu.uci.ics.hyracks.api.dataflow.value.IRecordDescriptorProvider;
+import edu.uci.ics.hyracks.api.dataflow.value.RecordDescriptor;
+import edu.uci.ics.hyracks.api.exceptions.HyracksDataException;
+import edu.uci.ics.hyracks.api.job.IOperatorEnvironment;
+import edu.uci.ics.hyracks.api.job.JobSpecification;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
+import edu.uci.ics.hyracks.dataflow.common.comm.io.FrameTupleAppender;
+import edu.uci.ics.hyracks.dataflow.common.comm.util.FrameUtils;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.IntegerSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.common.data.marshalling.UTF8StringSerializerDeserializer;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractSingleActivityOperatorDescriptor;
+import edu.uci.ics.hyracks.dataflow.std.base.AbstractUnaryOutputSourceOperatorNodePushable;
+
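+// Generates numRecords tuples matching the given RecordDescriptor: integer fields get
+// random values in [intMinVal, intMaxVal), string fields get random strings of up to
+// maxStrLen characters, and the field at index uniqueField is kept unique (bounded by
+// a fixed number of retry attempts).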
+public class DataGenOperatorDescriptor extends AbstractSingleActivityOperatorDescriptor {
+
+ private static final long serialVersionUID = 1L;
+ private final int numRecords;
+
+ private final int intMinVal;
+ private final int intMaxVal;
+ private final int maxStrLen;
+ private final int uniqueField;
+ private final long randomSeed;
+
+ public DataGenOperatorDescriptor(JobSpecification spec, RecordDescriptor outputRecord, int numRecords, int uniqueField, int intMinVal, int intMaxVal, int maxStrLen, long randomSeed) {
+ super(spec, 0, 1);
+ this.numRecords = numRecords;
+ this.uniqueField = uniqueField;
+ this.intMinVal = intMinVal;
+ this.intMaxVal = intMaxVal;
+ this.maxStrLen = maxStrLen;
+ this.randomSeed = randomSeed;
+ recordDescriptors[0] = outputRecord;
+ }
+
+ @Override
+ public IOperatorNodePushable createPushRuntime(IHyracksContext ctx,
+ IOperatorEnvironment env,
+ IRecordDescriptorProvider recordDescProvider, int partition,
+ int nPartitions) {
+
+ final ByteBuffer outputFrame = ctx.getResourceManager().allocateFrame();
+ final FrameTupleAppender appender = new FrameTupleAppender(ctx);
+ final RecordDescriptor recDesc = recordDescriptors[0];
+ final ArrayTupleBuilder tb = new ArrayTupleBuilder(recDesc.getFields().length);
+ final Random rnd = new Random(randomSeed);
+ final int maxUniqueAttempts = 20;
+
+ return new AbstractUnaryOutputSourceOperatorNodePushable() {
+
+ // for quick & dirty exclusion of duplicates
+ // WARNING: may grow to numRecords entries and use a lot of memory
+ HashSet<String> stringHs = new HashSet<String>();
+ HashSet<Integer> intHs = new HashSet<Integer>();
+
+ @Override
+ public void initialize() throws HyracksDataException {
+ writer.open();
+ try {
+ appender.reset(outputFrame, true);
+ for(int i = 0; i < numRecords; i++) {
+ tb.reset();
+ for(int j = 0; j < recDesc.getFields().length; j++) {
+ genField(tb, j);
+ }
+
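+ // if the current frame is full, flush it and retry the append on an empty frame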
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ FrameUtils.flushFrame(outputFrame, writer);
+ appender.reset(outputFrame, true);
+ if (!appender.append(tb.getFieldEndOffsets(), tb.getByteArray(), 0, tb.getSize())) {
+ throw new IllegalStateException();
+ }
+ }
+ }
+ FrameUtils.flushFrame(outputFrame, writer);
+ }
+ finally {
+ writer.close();
+ }
+ }
+
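+ // generates the value for one field; only integer and UTF-8 string fields are supported,
+ // and values for the uniqueField position are de-duplicated via the hash sets above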
+ private void genField(ArrayTupleBuilder tb, int fieldIndex) throws HyracksDataException {
+ DataOutput dos = tb.getDataOutput();
+ if(recDesc.getFields()[fieldIndex] instanceof IntegerSerializerDeserializer) {
+ int val = -1;
+ if(fieldIndex == uniqueField) {
+ int attempt = 0;
+ while(attempt < maxUniqueAttempts) {
+ // nextInt(bound) keeps the value in [intMinVal, intMaxVal) and avoids the Integer.MIN_VALUE pitfall of Math.abs()
+ int tmp = rnd.nextInt(intMaxVal - intMinVal) + intMinVal;
+ if(intHs.contains(tmp)) attempt++;
+ else {
+ val = tmp;
+ intHs.add(val);
+ break;
+ }
+ }
+ if(attempt == maxUniqueAttempts) throw new HyracksDataException("Max attempts to generate a unique value reached in data generator");
+ }
+ else {
+ val = rnd.nextInt(intMaxVal - intMinVal) + intMinVal;
+ }
+ recDesc.getFields()[fieldIndex].serialize(val, dos);
+ tb.addFieldEndOffset();
+ } else if (recDesc.getFields()[fieldIndex] instanceof UTF8StringSerializerDeserializer) {
+ String val = null;
+ if(fieldIndex == uniqueField) {
+ int attempt = 0;
+ while(attempt < maxUniqueAttempts) {
+ String tmp = randomString(maxStrLen, rnd);
+ if(stringHs.contains(tmp)) attempt++;
+ else {
+ val = tmp;
+ stringHs.add(val);
+ break;
+ }
+ }
+ if(attempt == maxUniqueAttempts) throw new HyracksDataException("Max attempts to generate a unique value reached in data generator");
+ }
+ else {
+ val = randomString(maxStrLen, rnd);
+ }
+ recDesc.getFields()[fieldIndex].serialize(val, dos);
+ tb.addFieldEndOffset();
+ } else {
+ throw new HyracksDataException("Type unsupported in data generator. Only integers and strings allowed");
+ }
+ }
+
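+ // builds a string of at most 'length' characters sampled from a random hex string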
+ private String randomString(int length, Random random) {
+ String s = Long.toHexString(Double.doubleToLongBits(random.nextDouble()));
+ StringBuilder strBuilder = new StringBuilder();
+ for (int i = 0; i < s.length() && i < length; i++) {
+ strBuilder.append(s.charAt(random.nextInt(s.length())));
+ }
+ return strBuilder.toString();
+ }
+ };
+ }
+}
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProvider.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProvider.java
new file mode 100644
index 0000000..c63be46
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProvider.java
@@ -0,0 +1,36 @@
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import java.util.Hashtable;
+import java.util.Map;
+
+import edu.uci.ics.hyracks.storage.common.file.IFileMappingProvider;
+
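+// Assigns integer file ids to file names on this node and remembers the mapping in a Hashtable.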
+public class FileMappingProvider implements IFileMappingProvider {
+
+ private static final long serialVersionUID = 1L;
+ private int nextFileId = 0;
+ private Map<String, Integer> map = new Hashtable<String, Integer>();
+
+ @Override
+ // synchronized so the check-then-put and nextFileId++ are atomic when called from concurrent tasks
+ public synchronized Integer mapNameToFileId(String name, boolean create) {
+ Integer val = map.get(name);
+ if(create) {
+ if(val == null) {
+ int ret = nextFileId;
+ map.put(name, nextFileId++);
+ return ret;
+ }
+ else {
+ return null; // create requested but value already exists
+ }
+ }
+ else {
+ return val; // just return value
+ }
+ }
+
+ @Override
+ public void unmapName(String name) {
+ map.remove(name);
+ }
+}
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProviderProvider.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProviderProvider.java
new file mode 100644
index 0000000..4f2c8f4
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/FileMappingProviderProvider.java
@@ -0,0 +1,15 @@
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.IFileMappingProviderProvider;
+import edu.uci.ics.hyracks.storage.common.file.IFileMappingProvider;
+
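+// Singleton that exposes the IFileMappingProvider owned by the node-local RuntimeContext.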
+public class FileMappingProviderProvider implements IFileMappingProviderProvider {
+ private static final long serialVersionUID = 1L;
+
+ public static final IFileMappingProviderProvider INSTANCE = new FileMappingProviderProvider();
+
+ private FileMappingProviderProvider() {
+ }
+
+ @Override
+ public IFileMappingProvider getFileMappingProvider() {
+ return RuntimeContext.getInstance().getFileMappingProvider();
+ }
+}
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java
new file mode 100644
index 0000000..4be1349
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/NCBootstrap.java
@@ -0,0 +1,30 @@
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import java.util.logging.Logger;
+
+import edu.uci.ics.hyracks.api.application.IApplicationContext;
+import edu.uci.ics.hyracks.api.application.IBootstrap;
+
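+// NC bootstrap hook (registered via hyracks-deployment.properties) that creates the shared RuntimeContext on start
+// and tears it down on stop.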
+public class NCBootstrap implements IBootstrap {
+ private static final Logger LOGGER = Logger.getLogger(NCBootstrap.class.getName());
+
+ private IApplicationContext appCtx;
+
+ @Override
+ public void start() throws Exception {
+ LOGGER.info("Starting NC Bootstrap");
+ RuntimeContext.initialize();
+ LOGGER.info("Initialized RuntimeContext: " + RuntimeContext.getInstance());
+ }
+
+ @Override
+ public void stop() throws Exception {
+ LOGGER.info("Stopping Asterix NC Bootstrap");
+ RuntimeContext.deinitialize();
+ }
+
+ @Override
+ public void setApplicationContext(IApplicationContext appCtx) {
+ this.appCtx = appCtx;
+ }
+}
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
new file mode 100644
index 0000000..d542ed9
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/java/edu/uci/ics/hyracks/examples/btree/helper/RuntimeContext.java
@@ -0,0 +1,70 @@
+package edu.uci.ics.hyracks.examples.btree.helper;
+
+import edu.uci.ics.hyracks.storage.am.btree.dataflow.BTreeRegistry;
+import edu.uci.ics.hyracks.storage.common.buffercache.BufferCache;
+import edu.uci.ics.hyracks.storage.common.buffercache.ClockPageReplacementStrategy;
+import edu.uci.ics.hyracks.storage.common.buffercache.HeapBufferAllocator;
+import edu.uci.ics.hyracks.storage.common.buffercache.IBufferCache;
+import edu.uci.ics.hyracks.storage.common.file.FileManager;
+import edu.uci.ics.hyracks.storage.common.file.IFileMappingProvider;
+
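+// Node-local singleton holding the resources shared by the example operators: file manager, buffer cache,
+// BTree registry, and file mapping provider.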
+public class RuntimeContext {
+ private static RuntimeContext INSTANCE;
+
+ private FileManager fileManager;
+ private IBufferCache bufferCache;
+ private BTreeRegistry btreeRegistry;
+ private IFileMappingProvider fileMappingProvider;
+
+ private RuntimeContext() {
+ }
+
+ public static void initialize() {
+ if (INSTANCE != null) {
+ throw new IllegalStateException("Instance already initialized");
+ }
+ INSTANCE = new RuntimeContext();
+ INSTANCE.start();
+ }
+
+ public static void deinitialize() {
+ if (INSTANCE != null) {
+ INSTANCE.stop();
+ INSTANCE = null;
+ }
+ }
+
+ private void stop() {
+ bufferCache.close();
+ fileManager.close();
+ }
+
+ private void start() {
+ fileManager = new FileManager();
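+ // 1024 pages of 32KB each, i.e. a 32MB buffer cache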
+ bufferCache = new BufferCache(new HeapBufferAllocator(), new ClockPageReplacementStrategy(), fileManager,
+ 32768, 1024);
+ btreeRegistry = new BTreeRegistry();
+ fileMappingProvider = new FileMappingProvider();
+ }
+
+ public static RuntimeContext getInstance() {
+ return INSTANCE;
+ }
+
+ public IBufferCache getBufferCache() {
+ return bufferCache;
+ }
+
+ public FileManager getFileManager() {
+ return fileManager;
+ }
+
+ public BTreeRegistry getBTreeRegistry() {
+ return btreeRegistry;
+ }
+
+ public IFileMappingProvider getFileMappingProvider() {
+ return fileMappingProvider;
+ }
+}
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/btreehelper/src/main/resources/hyracks-deployment.properties b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/resources/hyracks-deployment.properties
new file mode 100644
index 0000000..ab0ecb3
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/btreehelper/src/main/resources/hyracks-deployment.properties
@@ -0,0 +1 @@
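+# register the bootstrap class that sets up the RuntimeContext on each node controller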
+nc.bootstrap.class=edu.uci.ics.hyracks.examples.btree.helper.NCBootstrap
\ No newline at end of file
diff --git a/hyracks/hyracks-examples/btree-example/pom.xml b/hyracks/hyracks-examples/btree-example/pom.xml
new file mode 100644
index 0000000..9f84eb6
--- /dev/null
+++ b/hyracks/hyracks-examples/btree-example/pom.xml
@@ -0,0 +1,19 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>edu.uci.ics.hyracks.examples</groupId>
+ <artifactId>btree-example</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ <packaging>pom</packaging>
+
+ <parent>
+ <groupId>edu.uci.ics.hyracks</groupId>
+ <artifactId>hyracks-examples</artifactId>
+ <version>0.1.3-SNAPSHOT</version>
+ </parent>
+
+ <modules>
+ <module>btreehelper</module>
+ <module>btreeclient</module>
+ <module>btreeapp</module>
+ </modules>
+</project>
diff --git a/hyracks/hyracks-examples/pom.xml b/hyracks/hyracks-examples/pom.xml
index 9596b37..f8e8820 100644
--- a/hyracks/hyracks-examples/pom.xml
+++ b/hyracks/hyracks-examples/pom.xml
@@ -14,6 +14,7 @@
<modules>
<module>tpch-example</module>
<module>text-example</module>
+ <module>btree-example</module>
<module>hyracks-integration-tests</module>
</modules>
</project>